diff --git a/.github/workflows/arm-ttk-validations.yaml b/.github/workflows/arm-ttk-validations.yaml index 57dfb80239d..f56506a2018 100644 --- a/.github/workflows/arm-ttk-validations.yaml +++ b/.github/workflows/arm-ttk-validations.yaml @@ -20,7 +20,7 @@ jobs: steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: - fetch-depth: 0 + fetch-depth: 10 - shell: pwsh id: step1 name: Identify Changes in PR diff --git a/.github/workflows/hyperlinkValidator.yaml b/.github/workflows/hyperlinkValidator.yaml index 8d7e05aa605..4fe751bba2f 100644 --- a/.github/workflows/hyperlinkValidator.yaml +++ b/.github/workflows/hyperlinkValidator.yaml @@ -28,7 +28,7 @@ jobs: env: GeneratedToken: ${{ steps.generate_token.outputs.token }} with: - fetch-depth: 0 + fetch-depth: 10 token: ${{ env.GeneratedToken }} - shell: pwsh id: step1 diff --git a/.github/workflows/slash-command-armttk.yaml b/.github/workflows/slash-command-armttk.yaml index 2ab2a059139..6a9e01fb620 100644 --- a/.github/workflows/slash-command-armttk.yaml +++ b/.github/workflows/slash-command-armttk.yaml @@ -46,7 +46,7 @@ jobs: if: steps.get-pr.outputs.is_fork == 'false' uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: - fetch-depth: 0 + fetch-depth: 10 ref: ${{ steps.get-pr.outputs.head_sha }} persist-credentials: false - shell: pwsh diff --git a/.github/workflows/update-solutions-analyzer.yml b/.github/workflows/update-solutions-analyzer.yml index ece88a816f4..5bd04ab73eb 100644 --- a/.github/workflows/update-solutions-analyzer.yml +++ b/.github/workflows/update-solutions-analyzer.yml @@ -55,23 +55,47 @@ jobs: echo "changed=true" >> $GITHUB_OUTPUT fi - - name: Commit and push changes + - name: Create Pull Request if: steps.check_changes.outputs.changed == 'true' + id: create_pr + uses: peter-evans/create-pull-request@v6 + with: + token: ${{ secrets.GITHUB_TOKEN }} + commit-message: 'chore: Update Solutions Analyzer CSV files and documentation' + branch: 
solutions-analyzer-update + delete-branch: true + title: 'chore: Update Solutions Analyzer CSV files and documentation' + body: | + ## Automated Solutions Analyzer Update + + This PR contains automated updates to: + - Solutions connector-to-tables mapping CSV + - Solutions issues and exceptions report CSV + - Connector documentation files + + Generated by the Solutions Analyzer workflow. + + **Triggered by:** ${{ github.event_name }} + **Workflow run:** ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + labels: automated, documentation + add-paths: | + Tools/Solutions Analyzer/solutions_connectors_tables_mapping.csv + Tools/Solutions Analyzer/solutions_connectors_tables_issues_and_exceptions_report.csv + Tools/Solutions Analyzer/connector-docs/ + + - name: Enable auto-merge + if: steps.check_changes.outputs.changed == 'true' && steps.create_pr.outputs.pull-request-number != '' run: | - git config --local user.email "github-actions[bot]@users.noreply.github.com" - git config --local user.name "github-actions[bot]" - git add "Tools/Solutions Analyzer/solutions_connectors_tables_mapping.csv" - git add "Tools/Solutions Analyzer/solutions_connectors_tables_issues_and_exceptions_report.csv" - git add "Tools/Solutions Analyzer/connector-docs/" - git commit -m "chore: Update Solutions Analyzer CSV files and documentation [skip ci]" - git push + gh pr merge ${{ steps.create_pr.outputs.pull-request-number }} --auto --squash + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Create summary if: steps.check_changes.outputs.changed == 'true' run: | - echo "### Solutions Analyzer Updated :white_check_mark:" >> $GITHUB_STEP_SUMMARY + echo "### Solutions Analyzer Pull Request Created :white_check_mark:" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY - echo "CSV files and documentation have been regenerated and committed." >> $GITHUB_STEP_SUMMARY + echo "A pull request has been created with updated CSV files and documentation." 
>> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY echo "**Modified files:**" >> $GITHUB_STEP_SUMMARY echo "- Tools/Solutions Analyzer/solutions_connectors_tables_mapping.csv" >> $GITHUB_STEP_SUMMARY diff --git a/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_conn_agg_CL.json b/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_conn_agg_CL.json new file mode 100644 index 00000000000..d30f0cd53ae --- /dev/null +++ b/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_conn_agg_CL.json @@ -0,0 +1,285 @@ +{ + "Name": "Corelight_v2_conn_agg_CL", + "Properties": [ + { + "Name": "TimeGenerated", + "Type": "DateTime" + }, + { + "Name": "_path_s", + "Type": "String" + }, + { + "Name": "_system_name_s", + "Type": "String" + }, + { + "Name": "_write_ts_t", + "Type": "DateTime" + }, + { + "Name": "uid_s", + "Type": "String" + }, + { + "Name": "id_orig_h_s", + "Type": "String" + }, + { + "Name": "id_orig_p_d", + "Type": "Double" + }, + { + "Name": "id_resp_h_s", + "Type": "String" + }, + { + "Name": "id_resp_p_d", + "Type": "Double" + }, + { + "Name": "proto_s", + "Type": "String" + }, + { + "Name": "suri_ids_s", + "Type": "String" + }, + { + "Name": "local_orig_b", + "Type": "Boolean" + }, + { + "Name": "local_resp_b", + "Type": "Boolean" + }, + { + "Name": "id_orig_h_n_s", + "Type": "String" + }, + { + "Name": "id_resp_h_n_s", + "Type": "String" + }, + { + "Name": "community_id_s", + "Type": "String" + }, + { + "Name": "spcap_url_s", + "Type": "String" + }, + { + "Name": "service_s", + "Type": "String" + }, + { + "Name": "app_s", + "Type": "String" + }, + { + "Name": "corelight_shunted_b", + "Type": "Boolean" + }, + { + "Name": "duration_d", + "Type": "Double" + }, + { + "Name": "orig_bytes_d", + "Type": "Double" + }, + { + "Name": "resp_bytes_d", + "Type": "Double" + }, + { + "Name": "missed_bytes_d", + "Type": "Double" + }, + { + "Name": "orig_shunted_pkts_d", + "Type": "Double" + }, + { + "Name": "orig_shunted_bytes_d", + "Type": "Double" 
+ }, + { + "Name": "resp_shunted_pkts_d", + "Type": "Double" + }, + { + "Name": "resp_shunted_bytes_d", + "Type": "Double" + }, + { + "Name": "orig_pkts_d", + "Type": "Double" + }, + { + "Name": "orig_ip_bytes_d", + "Type": "Double" + }, + { + "Name": "resp_pkts_d", + "Type": "Double" + }, + { + "Name": "resp_ip_bytes_d", + "Type": "Double" + }, + { + "Name": "conn_state_s", + "Type": "String" + }, + { + "Name": "history_s", + "Type": "String" + }, + { + "Name": "tunnel_parents_s", + "Type": "String" + }, + { + "Name": "netskope_site_id_s", + "Type": "String" + }, + { + "Name": "netskope_user_id_s", + "Type": "String" + }, + { + "Name": "id_vlan_d", + "Type": "Double" + }, + { + "Name": "vlan_d", + "Type": "Double" + }, + { + "Name": "inner_vlan_d", + "Type": "Double" + }, + { + "Name": "orig_inst_org_id_s", + "Type": "String" + }, + { + "Name": "orig_inst_name_s", + "Type": "String" + }, + { + "Name": "orig_inst_az_s", + "Type": "String" + }, + { + "Name": "orig_inst_vpc_id_s", + "Type": "String" + }, + { + "Name": "orig_inst_subnet_id_s", + "Type": "String" + }, + { + "Name": "orig_inst_sg_ids_s", + "Type": "String" + }, + { + "Name": "orig_inst_project_s", + "Type": "String" + }, + { + "Name": "orig_inst_network_s", + "Type": "String" + }, + { + "Name": "orig_inst_network_tags_s", + "Type": "String" + }, + { + "Name": "orig_inst_id_s", + "Type": "String" + }, + { + "Name": "orig_inst_resource_group_s", + "Type": "String" + }, + { + "Name": "orig_inst_subscription_s", + "Type": "String" + }, + { + "Name": "orig_inst_os_s", + "Type": "String" + }, + { + "Name": "orig_inst_location_s", + "Type": "String" + }, + { + "Name": "orig_inst_nsg_s", + "Type": "String" + }, + { + "Name": "resp_inst_org_id_s", + "Type": "String" + }, + { + "Name": "resp_inst_name_s", + "Type": "String" + }, + { + "Name": "resp_inst_az_s", + "Type": "String" + }, + { + "Name": "resp_inst_vpc_id_s", + "Type": "String" + }, + { + "Name": "resp_inst_subnet_id_s", + "Type": "String" + }, + { + 
"Name": "resp_inst_sg_ids_s", + "Type": "String" + }, + { + "Name": "resp_inst_project_s", + "Type": "String" + }, + { + "Name": "resp_inst_network_s", + "Type": "String" + }, + { + "Name": "resp_inst_network_tags_s", + "Type": "String" + }, + { + "Name": "resp_inst_id_s", + "Type": "String" + }, + { + "Name": "resp_inst_resource_group_s", + "Type": "String" + }, + { + "Name": "resp_inst_subscription_s", + "Type": "String" + }, + { + "Name": "resp_inst_os_s", + "Type": "String" + }, + { + "Name": "resp_inst_location_s", + "Type": "String" + }, + { + "Name": "resp_inst_nsg_s", + "Type": "String" + } + ] +} diff --git a/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_dns_agg_CL.json b/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_dns_agg_CL.json new file mode 100644 index 00000000000..1d930557218 --- /dev/null +++ b/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_dns_agg_CL.json @@ -0,0 +1,125 @@ +{ + "Name": "Corelight_v2_dns_agg_CL", + "Properties": [ + { + "Name": "TimeGenerated", + "Type": "DateTime" + }, + { + "Name": "_path_s", + "Type": "String" + }, + { + "Name": "_system_name_s", + "Type": "String" + }, + { + "Name": "uid_s", + "Type": "String" + }, + { + "Name": "id_orig_h_s", + "Type": "String" + }, + { + "Name": "id_orig_p_d", + "Type": "Double" + }, + { + "Name": "id_resp_h_s", + "Type": "String" + }, + { + "Name": "id_resp_p_d", + "Type": "Double" + }, + { + "Name": "query_s", + "Type": "String" + }, + { + "Name": "qtype_d", + "Type": "Double" + }, + { + "Name": "answers_s", + "Type": "String" + }, + { + "Name": "rcode_d", + "Type": "Double" + }, + { + "Name": "qtype_name_s", + "Type": "String" + }, + { + "Name": "rcode_name_s", + "Type": "String" + }, + { + "Name": "rejected_b", + "Type": "Boolean" + }, + { + "Name": "proto_s", + "Type": "String" + }, + { + "Name": "trans_id_d", + "Type": "String" + }, + { + "Name": "rtt_d", + "Type": "Double" + }, + { + "Name": "qclass_d", + "Type": "Double" + }, + { + "Name": 
"qclass_name_s", + "Type": "String" + }, + { + "Name": "AA_b", + "Type": "Boolean" + }, + { + "Name": "RA_b", + "Type": "Boolean" + }, + { + "Name": "RD_b", + "Type": "Boolean" + }, + { + "Name": "TC_b", + "Type": "Boolean" + }, + { + "Name": "TTLs_s", + "Type": "String" + }, + { + "Name": "Z_d", + "Type": "Double" + }, + { + "Name": "icann_domain_s", + "Type": "String" + }, + { + "Name": "icann_host_subdomain_s", + "Type": "String" + }, + { + "Name": "icann_tld_s", + "Type": "String" + }, + { + "Name": "is_trusted_domain_b", + "Type": "Boolean" + } + ] +} diff --git a/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_files_agg_CL.json b/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_files_agg_CL.json new file mode 100644 index 00000000000..d3063a5a506 --- /dev/null +++ b/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_files_agg_CL.json @@ -0,0 +1,109 @@ +{ + "Name": "Corelight_v2_files_agg_CL", + "Properties": [ + { + "Name": "TimeGenerated", + "Type": "DateTime" + }, + { + "Name": "_path_s", + "Type": "String" + }, + { + "Name": "_system_name_s", + "Type": "String" + }, + { + "Name": "uid_s", + "Type": "String" + }, + { + "Name": "id_orig_h_s", + "Type": "String" + }, + { + "Name": "id_orig_p_d", + "Type": "Double" + }, + { + "Name": "id_resp_h_s", + "Type": "String" + }, + { + "Name": "id_resp_p_d", + "Type": "Double" + }, + { + "Name": "source_s", + "Type": "String" + }, + { + "Name": "analyzers_s", + "Type": "String" + }, + { + "Name": "filename_s", + "Type": "String" + }, + { + "Name": "md5_s", + "Type": "String" + }, + { + "Name": "is_orig_b", + "Type": "Boolean" + }, + { + "Name": "local_orig_b", + "Type": "Boolean" + }, + { + "Name": "sha1_s", + "Type": "String" + }, + { + "Name": "sha256_s", + "Type": "String" + }, + { + "Name": "fuid_s", + "Type": "String" + }, + { + "Name": "parent_fuid_s", + "Type": "String" + }, + { + "Name": "mime_type_s", + "Type": "String" + }, + { + "Name": "duration_d", + "Type": "Double" + }, + 
{ + "Name": "seen_bytes_d", + "Type": "Double" + }, + { + "Name": "total_bytes_d", + "Type": "Double" + }, + { + "Name": "missing_bytes_d", + "Type": "Double" + }, + { + "Name": "overflow_bytes_d", + "Type": "Double" + }, + { + "Name": "timedout_b", + "Type": "Boolean" + }, + { + "Name": "depth_d", + "Type": "Double" + } + ] +} diff --git a/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_http_agg_CL.json b/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_http_agg_CL.json new file mode 100644 index 00000000000..4d2e119cbf6 --- /dev/null +++ b/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_http_agg_CL.json @@ -0,0 +1,133 @@ +{ + "Name": "Corelight_v2_http_agg_CL", + "Properties": [ + { + "Name": "TimeGenerated", + "Type": "DateTime" + }, + { + "Name": "_path_s", + "Type": "String" + }, + { + "Name": "_system_name_s", + "Type": "String" + }, + { + "Name": "uid_s", + "Type": "String" + }, + { + "Name": "id_orig_h_s", + "Type": "String" + }, + { + "Name": "id_orig_p_d", + "Type": "Double" + }, + { + "Name": "id_resp_h_s", + "Type": "String" + }, + { + "Name": "id_resp_p_d", + "Type": "Double" + }, + { + "Name": "method_s", + "Type": "String" + }, + { + "Name": "host_s", + "Type": "String" + }, + { + "Name": "uri_s", + "Type": "String" + }, + { + "Name": "referrer_s", + "Type": "String" + }, + { + "Name": "status_code_d", + "Type": "Double" + }, + { + "Name": "status_msg_s", + "Type": "String" + }, + { + "Name": "request_body_len_d", + "Type": "Double" + }, + { + "Name": "response_body_len_d", + "Type": "Double" + }, + { + "Name": "tags_s", + "Type": "String" + }, + { + "Name": "orig_mime_types_s", + "Type": "String" + }, + { + "Name": "resp_mime_types_s", + "Type": "String" + }, + { + "Name": "post_body_s", + "Type": "String" + }, + { + "Name": "orig_fuids_s", + "Type": "String" + }, + { + "Name": "orig_filenames_s", + "Type": "String" + }, + { + "Name": "resp_fuids_s", + "Type": "String" + }, + { + "Name": "resp_filenames_s", + 
"Type": "String" + }, + { + "Name": "version_s", + "Type": "String" + }, + { + "Name": "user_agent_s", + "Type": "String" + }, + { + "Name": "username_s", + "Type": "String" + }, + { + "Name": "password_s", + "Type": "String" + }, + { + "Name": "proxied_s", + "Type": "String" + }, + { + "Name": "origin_s", + "Type": "String" + }, + { + "Name": "info_code_d", + "Type": "Double" + }, + { + "Name": "info_msg_s", + "Type": "String" + } + ] +} diff --git a/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_ssl_agg_CL.json b/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_ssl_agg_CL.json new file mode 100644 index 00000000000..1b22e64805d --- /dev/null +++ b/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_ssl_agg_CL.json @@ -0,0 +1,93 @@ +{ + "Name": "Corelight_v2_ssl_agg_CL", + "Properties": [ + { + "Name": "TimeGenerated", + "Type": "DateTime" + }, + { + "Name": "_path_s", + "Type": "String" + }, + { + "Name": "_system_name_s", + "Type": "String" + }, + { + "Name": "uid_s", + "Type": "String" + }, + { + "Name": "id_orig_h_s", + "Type": "String" + }, + { + "Name": "id_orig_p_d", + "Type": "Double" + }, + { + "Name": "id_resp_h_s", + "Type": "String" + }, + { + "Name": "id_resp_p_d", + "Type": "Double" + }, + { + "Name": "version_s", + "Type": "String" + }, + { + "Name": "cipher_s", + "Type": "String" + }, + { + "Name": "curve_s", + "Type": "String" + }, + { + "Name": "established_b", + "Type": "Boolean" + }, + { + "Name": "server_name_s", + "Type": "String" + }, + { + "Name": "next_protocol_s", + "Type": "String" + }, + { + "Name": "ssl_history_s", + "Type": "String" + }, + { + "Name": "cert_chain_fps_s", + "Type": "String" + }, + { + "Name": "client_cert_chain_fps_s", + "Type": "String" + }, + { + "Name": "validation_status_s", + "Type": "String" + }, + { + "Name": "ja3_s", + "Type": "String" + }, + { + "Name": "ja3s_s", + "Type": "String" + }, + { + "Name": "resumed_b", + "Type": "Boolean" + }, + { + "Name": "sni_matches_cert_b", + 
"Type": "Boolean" + } + ] +} diff --git a/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_weird_agg_CL.json b/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_weird_agg_CL.json new file mode 100644 index 00000000000..ad19feafac7 --- /dev/null +++ b/.script/tests/KqlvalidationsTests/CustomTables/Corelight_v2_weird_agg_CL.json @@ -0,0 +1,57 @@ +{ + "Name": "Corelight_v2_weird_agg_CL", + "Properties": [ + { + "Name": "TimeGenerated", + "Type": "DateTime" + }, + { + "Name": "_path_s", + "Type": "String" + }, + { + "Name": "_system_name_s", + "Type": "String" + }, + { + "Name": "uid_s", + "Type": "String" + }, + { + "Name": "id_orig_h_s", + "Type": "String" + }, + { + "Name": "id_orig_p_d", + "Type": "Double" + }, + { + "Name": "id_resp_h_s", + "Type": "String" + }, + { + "Name": "id_resp_p_d", + "Type": "Double" + }, + { + "Name": "name_s", + "Type": "String" + }, + { + "Name": "addl_s", + "Type": "String" + }, + { + "Name": "notice_b", + "Type": "Boolean" + }, + { + "Name": "peer_s", + "Type": "String" + }, + { + "Name": "source_s", + "Type": "String" + } + ] +} diff --git a/.script/tests/KqlvalidationsTests/CustomTables/GzSecurityEvents_CL.json b/.script/tests/KqlvalidationsTests/CustomTables/GzSecurityEvents_CL.json new file mode 100644 index 00000000000..37c66092b07 --- /dev/null +++ b/.script/tests/KqlvalidationsTests/CustomTables/GzSecurityEvents_CL.json @@ -0,0 +1,29 @@ +{ + "Name": "GzSecurityEvents_CL", + "Properties": [ + { + "name": "TimeGenerated", + "type": "DateTime" + }, + { + "name": "start_time", + "type": "DateTime" + }, + { + "name": "end_time", + "type": "DateTime" + }, + { + "name": "company_id", + "type": "String" + }, + { + "name": "module", + "type": "String" + }, + { + "name": "data", + "type": "Dynamic" + } + ] +} diff --git a/DataConnectors/AWS-S3/ConfigCloudTrailDataConnector.ps1 b/DataConnectors/AWS-S3/ConfigCloudTrailDataConnector.ps1 index 8460e21db03..762af73676a 100644 --- 
a/DataConnectors/AWS-S3/ConfigCloudTrailDataConnector.ps1 +++ b/DataConnectors/AWS-S3/ConfigCloudTrailDataConnector.ps1 @@ -246,7 +246,9 @@ $sqsArn = ((aws sqs get-queue-attributes --queue-url $sqsUrl --attribute-names Q Write-Log -Message $sqsArn -LogFileName $LogFileName -Severity Verbose $kmsConfirmation = Read-ValidatedHost -Prompt 'Do you want to enable KMS for CloudTrail? [y/n]' -ValidationType Confirm -if ($kmsConfirmation -eq 'y') { +$kmsEnabled = $kmsConfirmation -eq 'y' + +if ($kmsEnabled) { New-KMS $kmsArn = ($kmsKeyDescription | ConvertFrom-Json).KeyMetadata.Arn $kmsKeyId = ($kmsKeyDescription | ConvertFrom-Json).KeyMetadata.KeyId @@ -291,7 +293,7 @@ Set-RetryAction({ $isCloudTrailNotExist = $lastexitcode -ne 0 if ($isCloudTrailNotExist) { - if ($kmsConfirmation -eq 'y') { + if ($kmsEnabled) { Write-Log -Message "Executing: aws cloudtrail create-trail --name $cloudTrailName --s3-bucket-name $bucketName --kms-key-id $kmsKeyId --tags-list $(ConvertTo-Json -InputObject @($(Get-SentinelTagInJsonFormat) | ConvertFrom-Json) -Depth 99 -Compress) 2>&1" -LogFileName $LogFileName -Severity Verbose $tempForOutput = aws cloudtrail create-trail --name $cloudTrailName --s3-bucket-name $bucketName --kms-key-id $kmsKeyId --tags-list $(ConvertTo-Json -InputObject @($(Get-SentinelTagInJsonFormat) | ConvertFrom-Json) -Depth 99 -Compress) 2>&1 Write-Log -Message $tempForOutput -LogFileName $LogFileName -Severity Verbose @@ -309,9 +311,9 @@ Set-RetryAction({ $cloudTrailBucketConfirmation = Read-ValidatedHost "Trail '${cloudTrailName}' is already configured. Do you want to override the bucket destination? 
[y/n]" if ($cloudTrailBucketConfirmation -eq 'y') { - if ($kmsConfirmation -eq 'y') { - Write-Log -Message "Executing: aws cloudtrail update-trail --name $cloudTrailName --s3-bucket-name $bucketName -kms-key-id $kmsKeyId | Out-Null" -LogFileName $LogFileName -Severity Verbose - aws cloudtrail update-trail --name $cloudTrailName --s3-bucket-name $bucketName -kms-key-id $kmsKeyId | Out-Null + if ($kmsEnabled) { + Write-Log -Message "Executing: aws cloudtrail update-trail --name $cloudTrailName --s3-bucket-name $bucketName --kms-key-id $kmsKeyId | Out-Null" -LogFileName $LogFileName -Severity Verbose + aws cloudtrail update-trail --name $cloudTrailName --s3-bucket-name $bucketName --kms-key-id $kmsKeyId | Out-Null } else { Write-Log -Message "Executing: aws cloudtrail update-trail --name $cloudTrailName --s3-bucket-name $bucketName | Out-Null" -LogFileName $LogFileName -Severity Verbose diff --git a/Logos/Bitdefender.svg b/Logos/Bitdefender.svg new file mode 100644 index 00000000000..58e7a7718b8 --- /dev/null +++ b/Logos/Bitdefender.svg @@ -0,0 +1,64 @@ + + + + + + + + + + diff --git a/Logos/SOCPrime_Logo.svg b/Logos/SOCPrime_Logo.svg new file mode 100644 index 00000000000..1aebffc07e0 --- /dev/null +++ b/Logos/SOCPrime_Logo.svg @@ -0,0 +1,21 @@ + + + + + + + + + + + + + diff --git a/Parsers/ASimAlertEvent/ARM/ASimAlertEvent/ASimAlertEvent.json b/Parsers/ASimAlertEvent/ARM/ASimAlertEvent/ASimAlertEvent.json index 706e67042d0..1a0546593c9 100644 --- a/Parsers/ASimAlertEvent/ARM/ASimAlertEvent/ASimAlertEvent.json +++ b/Parsers/ASimAlertEvent/ARM/ASimAlertEvent/ASimAlertEvent.json @@ -27,7 +27,7 @@ "displayName": "Alert Event ASIM parser", "category": "ASIM", "FunctionAlias": "ASimAlertEvent", - "query": "let DisabledParsers=materialize(_GetWatchlist('ASimDisabledParsers') | where SearchKey in ('Any', 'ExcludeASimAlertEvent') | extend SourceSpecificParser=column_ifexists('SourceSpecificParser','') | distinct SourceSpecificParser| where 
isnotempty(SourceSpecificParser));\nlet ASimBuiltInDisabled=toscalar('ExcludeASimAlertEvent' in (DisabledParsers) or 'Any' in (DisabledParsers)); \nlet parser=(pack:bool=false){\nunion isfuzzy=true\n vimAlertEventEmpty,\n ASimAlertEventMicrosoftDefenderXDR (disabled=(ASimBuiltInDisabled or ('ExcludeASimAlertEventMicrosoftDefenderXDR' in (DisabledParsers)))),\n ASimAlertEventSentinelOneSingularity (disabled=(ASimBuiltInDisabled or ('ExcludeASimAlertEventSentinelOneSingularity' in (DisabledParsers))))\n}; \nparser (pack=pack)\n", + "query": "let DisabledParsers=materialize(_GetWatchlist('ASimDisabledParsers') | where SearchKey in ('Any', 'ExcludeASimAlertEvent') | extend SourceSpecificParser=column_ifexists('SourceSpecificParser','') | distinct SourceSpecificParser| where isnotempty(SourceSpecificParser));\nlet ASimBuiltInDisabled=toscalar('ExcludeASimAlertEvent' in (DisabledParsers) or 'Any' in (DisabledParsers)); \nlet parser=(pack:bool=false){\nunion isfuzzy=true\n vimAlertEventEmpty,\n ASimAlertEventBitdefenderGravityZone (disabled=(ASimBuiltInDisabled or ('ExcludeASimAlertEventBitdefenderGravityZone' in (DisabledParsers)))),\n ASimAlertEventMicrosoftDefenderXDR (disabled=(ASimBuiltInDisabled or ('ExcludeASimAlertEventMicrosoftDefenderXDR' in (DisabledParsers)))),\n ASimAlertEventSentinelOneSingularity (disabled=(ASimBuiltInDisabled or ('ExcludeASimAlertEventSentinelOneSingularity' in (DisabledParsers))))\n}; \nparser (pack=pack)\n", "version": 1, "functionParameters": "pack:bool=False" } diff --git a/Parsers/ASimAlertEvent/ARM/ASimAlertEventBitdefenderGravityZone/ASimAlertEventBitdefenderGravityZone.json b/Parsers/ASimAlertEvent/ARM/ASimAlertEventBitdefenderGravityZone/ASimAlertEventBitdefenderGravityZone.json new file mode 100644 index 00000000000..61b1afff220 --- /dev/null +++ b/Parsers/ASimAlertEvent/ARM/ASimAlertEventBitdefenderGravityZone/ASimAlertEventBitdefenderGravityZone.json @@ -0,0 +1,36 @@ +{ + "$schema": 
"https://schema.management.azure.com/schemas/2019-08-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "Workspace": { + "type": "string", + "metadata": { + "description": "The Microsoft Sentinel workspace into which the function will be deployed. Has to be in the selected Resource Group." + } + }, + "WorkspaceRegion": { + "type": "string", + "defaultValue": "[resourceGroup().location]", + "metadata": { + "description": "The region of the selected workspace. The default value will use the Region selection above." + } + } + }, + "resources": [ + { + "type": "Microsoft.OperationalInsights/workspaces/savedSearches", + "apiVersion": "2020-08-01", + "name": "[concat(parameters('Workspace'), '/ASimAlertEventBitdefenderGravityZone')]", + "location": "[parameters('WorkspaceRegion')]", + "properties": { + "etag": "*", + "displayName": "Alert event ASIM parser for Bitdefender GravityZone", + "category": "ASIM", + "FunctionAlias": "ASimAlertEventBitdefenderGravityZone", + "query": "let parser=(disabled:bool=false){\n let allEvents = GzSecurityEvents_CL\n | where not(disabled);\n\n let newIncidentEvents = allEvents\n | where module == \"new-incident\"\n | extend d = data\n // --- Core ASIM fields ---\n | extend\n EventVendor = \"Bitdefender\",\n EventProduct = \"GravityZone\",\n EventSchema = \"AlertEvent\",\n EventSchemaVersion = 1,\n EventType = \"Alert\",\n EventStartTime = start_time,\n EventEndTime = end_time,\n EventUid = tostring(d.incident_id),\n EventSeverity = case(\n d.severity == \"low\", \"Low\",\n d.severity == \"medium\", \"Medium\",\n d.severity == \"high\", \"High\",\n \"Low\" // fallback value if null or unmatched\n ),\n EventCount = 1,\n DvcHostname = d.computer_name,\n DvcFQDN = d.computer_fqdn,\n DvcIpAddr = d.computer_ip,\n DvcId = d.endpointId,\n DvcAction = replace_string(tostring(d.main_action), \"_\", \" \")\n // --- Additional (packed) fields ---\n | extend AdditionalFields = bag_pack(\n \"SeverityScore\", 
d.severity_score,\n \"DetectionName\", d.detection_name,\n \"FileName\", d.file_name,\n \"FilePath\", d.file_path,\n \"FileHashMd5\", d.file_hash_md5,\n \"FileHashSha256\", d.file_hash_sha256,\n \"URL\", d.url,\n \"Port\", d.port,\n \"Protocol\", d.protocol,\n \"SourceIp\", d.source_ip,\n \"ProcessPid\", d.process_pid,\n \"ProcessPath\", d.process_path,\n \"ParentProcessPid\", d.parent_process_pid,\n \"ParentProcessPath\", d.parent_process_path,\n \"AttackTypes\", d.attack_types,\n \"AttCkId\", d.att_ck_id,\n \"ProcessCommandLine\", d.process_command_line,\n \"Username\", d.username,\n \"UserSid\", d.user_sid,\n \"CompanyId\", company_id,\n \"Module\", module\n )\n // Final ASIM projection (keep core + AdditionalFields)\n | project\n EventVendor,\n EventProduct,\n EventSchema,\n EventSchemaVersion,\n EventType,\n EventStartTime,\n EventEndTime,\n EventUid,\n EventSeverity,\n EventCount,\n DvcHostname,\n DvcFQDN,\n DvcIpAddr,\n DvcId,\n DvcAction,\n AdditionalFields; \n\n let newExtendedIncidentEvents = allEvents\n | where module == \"new-extended-incident\"\n | extend d = data\n // --- Core ASIM fields ---\n | extend\n EventVendor = \"Bitdefender\",\n EventProduct = \"GravityZone\",\n EventSchema = \"AlertEvent\",\n EventSchemaVersion = 1,\n EventType = \"Alert\",\n EventStartTime = start_time,\n EventEndTime = end_time,\n EventUid = tostring(d.incident_id),\n EventSeverity = case(\n d.severity == \"low\", \"Low\",\n d.severity == \"medium\", \"Medium\",\n d.severity == \"high\", \"High\",\n \"Low\" // fallback value if null or unmatched\n ),\n EventCount = 1,\n DvcAction = replace_string(tostring(d.main_action), \"_\", \" \")\n // --- Additional (packed) fields ---\n | extend AdditionalFields = bag_pack(\n \"SeverityScore\", d.severity_score,\n \"IncidentNumber\", d.incident_number,\n \"IncidentVersion\", d.version,\n \"KillchainPhases\", d.killchain_phases,\n \"LastKillchainPhase\", d.last_killchain_phase,\n \"AttackTypes\", d.attack_types,\n 
\"CorrelatedIncidentIds\", d.correlated_incidents,\n \"Nodes\", d.nodes,\n \"CompanyId\", company_id,\n \"Module\", module\n )\n // Final ASIM projection (keep core + AdditionalFields)\n | project\n EventVendor,\n EventProduct,\n EventSchema,\n EventSchemaVersion,\n EventType,\n EventStartTime,\n EventEndTime,\n EventUid,\n EventSeverity,\n EventCount,\n DvcAction,\n AdditionalFields;\n\n let ransomwareMitigationEvents = allEvents\n | where module == \"ransomware-mitigation\"\n | extend d = data\n // --- Core ASIM fields ---\n | extend\n EventVendor = \"Bitdefender\",\n EventProduct = \"GravityZone\",\n EventSchema = \"AlertEvent\",\n EventSchemaVersion = 1,\n EventType = \"Alert\",\n EventStartTime = start_time,\n EventEndTime = end_time,\n EventUid = _ItemId,\n EventSeverity = \"Informational\",\n EventCount = 1,\n DvcHostname = d.computer_name,\n DvcFQDN = d.computer_fqdn,\n DvcIpAddr = d.computer_ip,\n DvcId = d.endpointId,\n DvcAction = \"Blocked\"\n // --- Additional (packed) fields ---\n | extend AdditionalFields = bag_pack(\n \"CompanyId\", company_id,\n \"CompanyName\", d.company_name,\n \"AttackType\", d.attack_type, // custom, not MITRE value\n \"AttackTypes\", dynamic([]), // mandatory MITRE field\n \"ItemCount\", d.item_count,\n \"AttackSource\", d.attack_source,\n \"EndpointProduct\", d.product_installed,\n \"Module\", module\n )\n // Final ASIM projection (keep core + AdditionalFields)\n | project\n EventVendor,\n EventProduct,\n EventSchema,\n EventSchemaVersion,\n EventType,\n EventStartTime,\n EventEndTime,\n EventUid,\n EventSeverity,\n EventCount,\n DvcId,\n DvcHostname,\n DvcFQDN,\n DvcIpAddr,\n DvcAction,\n AdditionalFields;\n\n let networkSandboxingEvents = allEvents\n | where module == \"network-sandboxing\"\n | extend d = data\n // --- Compute required fields for downstream mapping ---\n | extend MaxRemediationAction = iff(array_length(d.remediationActions) > 0, todouble(array_sort_desc(d.remediationActions)[0]), double(3))\n // --- Core 
ASIM fields ---\n | extend\n EventVendor = \"Bitdefender\",\n EventProduct = \"GravityZone\",\n EventSchema = \"AlertEvent\",\n EventSchemaVersion = 1,\n EventType = \"Alert\",\n EventStartTime = start_time,\n EventEndTime = end_time,\n EventUid = tostring(d.submissionId),\n \n EventSeverity = case(\n MaxRemediationAction == 3, \"Low\", // report only\n MaxRemediationAction == 2, \"Medium\", // move\n \"High\"// fallback - disinfect or delete\n ),\n EventCount = 1,\n DvcHostname = d.computerName,\n DvcIpAddr = d.computerIp,\n DvcId = d.endpointId\n // --- Additional (packed) fields ---\n | extend AdditionalFields = bag_pack(\n \"CompanyId\", company_id,\n \"DeviceExternalId\", d.deviceExternalId,\n \"ThreatType\", d.threatType,\n \"FilePaths\", d.filePaths,\n \"FileSizes\", d.fileSizes,\n \"RemediationActions\", d.remediationActions,\n \"Module\", module\n )\n // Final ASIM projection (keep core + AdditionalFields)\n | project\n EventVendor,\n EventProduct,\n EventSchema,\n EventSchemaVersion,\n EventType,\n EventStartTime,\n EventEndTime,\n EventUid,\n EventSeverity,\n EventCount,\n DvcId,\n DvcHostname,\n DvcIpAddr,\n AdditionalFields;\n\n\n let exchangeMalwareEvents = allEvents\n | where module == \"exchange-malware\"\n | extend d = data\n // --- Core ASIM fields ---\n | extend\n EventVendor = \"Bitdefender\",\n EventProduct = \"GravityZone\",\n EventSchema = \"AlertEvent\",\n EventSchemaVersion = 1,\n EventType = \"Alert\",\n EventStartTime = start_time,\n EventEndTime = end_time,\n EventUid = _ItemId,\n EventSeverity = \"Informational\",\n EventCount = 1,\n DvcHostname = d.computer_name,\n DvcFQDN = d.computer_fqdn,\n DvcIpAddr = d.computer_ip,\n DvcId = d.endpointId,\n DvcAction = \"Blocked\"\n // --- Additional (packed) fields ---\n | extend AdditionalFields = bag_pack(\n \"CompanyId\", company_id,\n \"Malware\", d.malware,\n \"Subject\", d.subject,\n \"Recipients\", d.recipients,\n \"Sender\", d.sender,\n \"ServerName\", d.serverName,\n \"EndpointProduct\", 
d.product_installed,\n \"Module\", module\n )\n // Final ASIM projection (keep core + AdditionalFields)\n | project\n EventVendor,\n EventProduct,\n EventSchema,\n EventSchemaVersion,\n EventType,\n EventStartTime,\n EventEndTime,\n EventUid,\n EventSeverity,\n EventCount,\n DvcId,\n DvcHostname,\n DvcFQDN,\n DvcIpAddr,\n DvcAction,\n AdditionalFields;\n\n union newExtendedIncidentEvents, newIncidentEvents, ransomwareMitigationEvents, networkSandboxingEvents, exchangeMalwareEvents\n};\nparser(disabled=disabled)\n", + "version": 1, + "functionParameters": "disabled:bool=False" + } + } + ] +} \ No newline at end of file diff --git a/Parsers/ASimAlertEvent/ARM/ASimAlertEventBitdefenderGravityZone/README.md b/Parsers/ASimAlertEvent/ARM/ASimAlertEventBitdefenderGravityZone/README.md new file mode 100644 index 00000000000..7229e6a48ed --- /dev/null +++ b/Parsers/ASimAlertEvent/ARM/ASimAlertEventBitdefenderGravityZone/README.md @@ -0,0 +1,18 @@ +# Bitdefender GravityZone ASIM AlertEvent Normalization Parser + +ARM template for ASIM AlertEvent schema parser for Bitdefender GravityZone. + +This ASIM parser supports normalizing the Bitdefender GravityZone logs to the ASIM Alert normalized schema. + + +The Advanced Security Information Model (ASIM) enables you to use and create source-agnostic content, simplifying your analysis of the data in your Microsoft Sentinel workspace. + +For more information, see: + +- [Normalization and the Advanced Security Information Model (ASIM)](https://aka.ms/AboutASIM) +- [Deploy all of ASIM](https://aka.ms/DeployASIM) +- [ASIM AlertEvent normalization schema reference](https://aka.ms/ASimAlertEventDoc) + +
+ +[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FParsers%2FASimAlertEvent%2FARM%2FASimAlertEventBitdefenderGravityZone%2FASimAlertEventBitdefenderGravityZone.json) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://portal.azure.us/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FParsers%2FASimAlertEvent%2FARM%2FASimAlertEventBitdefenderGravityZone%2FASimAlertEventBitdefenderGravityZone.json) diff --git a/Parsers/ASimAlertEvent/ARM/FullDeploymentAlertEvent.json b/Parsers/ASimAlertEvent/ARM/FullDeploymentAlertEvent.json index 511d954dd5f..8abfbb07cc6 100644 --- a/Parsers/ASimAlertEvent/ARM/FullDeploymentAlertEvent.json +++ b/Parsers/ASimAlertEvent/ARM/FullDeploymentAlertEvent.json @@ -38,6 +38,26 @@ } } }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2020-10-01", + "name": "linkedASimAlertEventBitdefenderGravityZone", + "properties": { + "mode": "Incremental", + "templateLink": { + "uri": "https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/ASimAlertEvent/ARM/ASimAlertEventBitdefenderGravityZone/ASimAlertEventBitdefenderGravityZone.json", + "contentVersion": "1.0.0.0" + }, + "parameters": { + "Workspace": { + "value": "[parameters('Workspace')]" + }, + "WorkspaceRegion": { + "value": "[parameters('WorkspaceRegion')]" + } + } + } + }, { "type": "Microsoft.Resources/deployments", "apiVersion": "2020-10-01", @@ -98,6 +118,26 @@ } } }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2020-10-01", + "name": "linkedvimAlertEventBitdefenderGravityZone", + "properties": { + "mode": "Incremental", + "templateLink": { + "uri": "https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/ASimAlertEvent/ARM/vimAlertEventBitdefenderGravityZone/vimAlertEventBitdefenderGravityZone.json", + 
"contentVersion": "1.0.0.0" + }, + "parameters": { + "Workspace": { + "value": "[parameters('Workspace')]" + }, + "WorkspaceRegion": { + "value": "[parameters('WorkspaceRegion')]" + } + } + } + }, { "type": "Microsoft.Resources/deployments", "apiVersion": "2020-10-01", diff --git a/Parsers/ASimAlertEvent/ARM/imAlertEvent/imAlertEvent.json b/Parsers/ASimAlertEvent/ARM/imAlertEvent/imAlertEvent.json index fd1a55a4955..2f8993270ec 100644 --- a/Parsers/ASimAlertEvent/ARM/imAlertEvent/imAlertEvent.json +++ b/Parsers/ASimAlertEvent/ARM/imAlertEvent/imAlertEvent.json @@ -27,7 +27,7 @@ "displayName": "Alert Event ASIM filtering parser", "category": "ASIM", "FunctionAlias": "imAlertEvent", - "query": "let DisabledParsers=materialize(_GetWatchlist('ASimDisabledParsers') | where SearchKey in ('Any', 'ExcludeimAlertEvent') | extend SourceSpecificParser=column_ifexists('SourceSpecificParser','') | distinct SourceSpecificParser | where isnotempty(SourceSpecificParser));\nlet vimBuiltInDisabled=toscalar('ExcludevimAlertEvent' in (DisabledParsers) or 'Any' in (DisabledParsers)); \nlet parser=(\n starttime: datetime=datetime(null), \n endtime: datetime=datetime(null), \n ipaddr_has_any_prefix: dynamic=dynamic([]),\n hostname_has_any: dynamic=dynamic([]),\n username_has_any: dynamic=dynamic([]),\n attacktactics_has_any: dynamic=dynamic([]),\n attacktechniques_has_any: dynamic=dynamic([]),\n threatcategory_has_any: dynamic=dynamic([]),\n alertverdict_has_any: dynamic=dynamic([]),\n eventseverity_has_any: dynamic=dynamic([]),\n pack:bool=false)\n{\nunion isfuzzy=true\n vimAlertEventEmpty,\n vimAlertEventMicrosoftDefenderXDR (starttime=starttime, endtime=endtime, ipaddr_has_any_prefix=ipaddr_has_any_prefix, hostname_has_any=hostname_has_any, username_has_any=username_has_any, attacktactics_has_any=attacktactics_has_any, attacktechniques_has_any=attacktechniques_has_any, threatcategory_has_any=threatcategory_has_any, alertverdict_has_any=alertverdict_has_any, 
eventseverity_has_any=eventseverity_has_any, disabled=(vimBuiltInDisabled or ('ExcludevimAlertMicrosoftDefenderXDR' in (DisabledParsers)))),\n vimAlertEventSentinelOneSingularity (starttime=starttime, endtime=endtime, ipaddr_has_any_prefix=ipaddr_has_any_prefix, hostname_has_any=hostname_has_any, username_has_any=username_has_any, attacktactics_has_any=attacktactics_has_any, attacktechniques_has_any=attacktechniques_has_any, threatcategory_has_any=threatcategory_has_any, alertverdict_has_any=alertverdict_has_any, eventseverity_has_any=eventseverity_has_any, disabled=(vimBuiltInDisabled or ('ExcludevimAlertSentinelOneSingularity' in (DisabledParsers))))\n};\nparser (starttime=starttime, endtime=endtime, ipaddr_has_any_prefix=ipaddr_has_any_prefix, hostname_has_any=hostname_has_any, username_has_any=username_has_any, attacktactics_has_any=attacktactics_has_any, attacktechniques_has_any=attacktechniques_has_any, threatcategory_has_any=threatcategory_has_any, alertverdict_has_any=alertverdict_has_any, eventseverity_has_any=eventseverity_has_any, pack=pack)\n", + "query": "let DisabledParsers=materialize(_GetWatchlist('ASimDisabledParsers') | where SearchKey in ('Any', 'ExcludeimAlertEvent') | extend SourceSpecificParser=column_ifexists('SourceSpecificParser','') | distinct SourceSpecificParser | where isnotempty(SourceSpecificParser));\nlet vimBuiltInDisabled=toscalar('ExcludevimAlertEvent' in (DisabledParsers) or 'Any' in (DisabledParsers)); \nlet parser=(\n starttime: datetime=datetime(null), \n endtime: datetime=datetime(null), \n ipaddr_has_any_prefix: dynamic=dynamic([]),\n hostname_has_any: dynamic=dynamic([]),\n username_has_any: dynamic=dynamic([]),\n attacktactics_has_any: dynamic=dynamic([]),\n attacktechniques_has_any: dynamic=dynamic([]),\n threatcategory_has_any: dynamic=dynamic([]),\n alertverdict_has_any: dynamic=dynamic([]),\n eventseverity_has_any: dynamic=dynamic([]),\n pack:bool=false)\n{\nunion isfuzzy=true\n vimAlertEventEmpty,\n 
vimAlertEventBitdefenderGravityZone (starttime=starttime, endtime=endtime, ipaddr_has_any_prefix=ipaddr_has_any_prefix, hostname_has_any=hostname_has_any, username_has_any=username_has_any, attacktactics_has_any=attacktactics_has_any, attacktechniques_has_any=attacktechniques_has_any, threatcategory_has_any=threatcategory_has_any, alertverdict_has_any=alertverdict_has_any, eventseverity_has_any=eventseverity_has_any, disabled=(vimBuiltInDisabled or ('ExcludevimAlertBitdefenderGravityZone' in (DisabledParsers)))),\n vimAlertEventMicrosoftDefenderXDR (starttime=starttime, endtime=endtime, ipaddr_has_any_prefix=ipaddr_has_any_prefix, hostname_has_any=hostname_has_any, username_has_any=username_has_any, attacktactics_has_any=attacktactics_has_any, attacktechniques_has_any=attacktechniques_has_any, threatcategory_has_any=threatcategory_has_any, alertverdict_has_any=alertverdict_has_any, eventseverity_has_any=eventseverity_has_any, disabled=(vimBuiltInDisabled or ('ExcludevimAlertMicrosoftDefenderXDR' in (DisabledParsers)))),\n vimAlertEventSentinelOneSingularity (starttime=starttime, endtime=endtime, ipaddr_has_any_prefix=ipaddr_has_any_prefix, hostname_has_any=hostname_has_any, username_has_any=username_has_any, attacktactics_has_any=attacktactics_has_any, attacktechniques_has_any=attacktechniques_has_any, threatcategory_has_any=threatcategory_has_any, alertverdict_has_any=alertverdict_has_any, eventseverity_has_any=eventseverity_has_any, disabled=(vimBuiltInDisabled or ('ExcludevimAlertSentinelOneSingularity' in (DisabledParsers))))\n};\nparser (starttime=starttime, endtime=endtime, ipaddr_has_any_prefix=ipaddr_has_any_prefix, hostname_has_any=hostname_has_any, username_has_any=username_has_any, attacktactics_has_any=attacktactics_has_any, attacktechniques_has_any=attacktechniques_has_any, threatcategory_has_any=threatcategory_has_any, alertverdict_has_any=alertverdict_has_any, eventseverity_has_any=eventseverity_has_any, pack=pack)\n", "version": 1, 
"functionParameters": "starttime:datetime=datetime(null),endtime:datetime=datetime(null),ipaddr_has_any_prefix:dynamic=dynamic([]),hostname_has_any:dynamic=dynamic([]),username_has_any:dynamic=dynamic([]),attacktactics_has_any:dynamic=dynamic([]),attacktechniques_has_any:dynamic=dynamic([]),threatcategory_has_any:dynamic=dynamic([]),alertverdict_has_any:dynamic=dynamic([]),eventseverity_has_any:dynamic=dynamic([]),pack:bool=False" } diff --git a/Parsers/ASimAlertEvent/ARM/vimAlertEventBitdefenderGravityZone/README.md b/Parsers/ASimAlertEvent/ARM/vimAlertEventBitdefenderGravityZone/README.md new file mode 100644 index 00000000000..7acfceb7b59 --- /dev/null +++ b/Parsers/ASimAlertEvent/ARM/vimAlertEventBitdefenderGravityZone/README.md @@ -0,0 +1,18 @@ +# Bitdefender GravityZone ASIM AlertEvent Normalization Parser + +ARM template for ASIM AlertEvent schema parser for Bitdefender GravityZone. + +This ASIM parser supports normalizing and filtering the Bitdefender GravityZone logs to the ASIM Alert normalized schema. + + +The Advanced Security Information Model (ASIM) enables you to use and create source-agnostic content, simplifying your analysis of the data in your Microsoft Sentinel workspace. + +For more information, see: + +- [Normalization and the Advanced Security Information Model (ASIM)](https://aka.ms/AboutASIM) +- [Deploy all of ASIM](https://aka.ms/DeployASIM) +- [ASIM AlertEvent normalization schema reference](https://aka.ms/ASimAlertEventDoc) + +
+ +[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FParsers%2FASimAlertEvent%2FARM%2FvimAlertEventBitdefenderGravityZone%2FvimAlertEventBitdefenderGravityZone.json) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://portal.azure.us/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FParsers%2FASimAlertEvent%2FARM%2FvimAlertEventBitdefenderGravityZone%2FvimAlertEventBitdefenderGravityZone.json) diff --git a/Parsers/ASimAlertEvent/ARM/vimAlertEventBitdefenderGravityZone/vimAlertEventBitdefenderGravityZone.json b/Parsers/ASimAlertEvent/ARM/vimAlertEventBitdefenderGravityZone/vimAlertEventBitdefenderGravityZone.json new file mode 100644 index 00000000000..9b08a71f9e3 --- /dev/null +++ b/Parsers/ASimAlertEvent/ARM/vimAlertEventBitdefenderGravityZone/vimAlertEventBitdefenderGravityZone.json @@ -0,0 +1,36 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-08-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "Workspace": { + "type": "string", + "metadata": { + "description": "The Microsoft Sentinel workspace into which the function will be deployed. Has to be in the selected Resource Group." + } + }, + "WorkspaceRegion": { + "type": "string", + "defaultValue": "[resourceGroup().location]", + "metadata": { + "description": "The region of the selected workspace. The default value will use the Region selection above." 
+ } + } + }, + "resources": [ + { + "type": "Microsoft.OperationalInsights/workspaces/savedSearches", + "apiVersion": "2020-08-01", + "name": "[concat(parameters('Workspace'), '/vimAlertEventBitdefenderGravityZone')]", + "location": "[parameters('WorkspaceRegion')]", + "properties": { + "etag": "*", + "displayName": "Alert Event ASIM filtering parser for Bitdefender GravityZone", + "category": "ASIM", + "FunctionAlias": "vimAlertEventBitdefenderGravityZone", + "query": "let parser = (\n starttime: datetime=datetime(null), \n endtime: datetime=datetime(null), \n ipaddr_has_any_prefix: dynamic=dynamic([]),\n hostname_has_any: dynamic=dynamic([]),\n username_has_any: dynamic=dynamic([]),\n attacktactics_has_any: dynamic=dynamic([]),\n attacktechniques_has_any: dynamic=dynamic([]),\n threatcategory_has_any: dynamic=dynamic([]),\n alertverdict_has_any: dynamic=dynamic([]),\n eventseverity_has_any: dynamic=dynamic([]),\n disabled: bool=false) {\n\n // Filtering based on the parameters above\n\n let allEvents = GzSecurityEvents_CL\n | where not(disabled)\n | where (isnull(starttime) or start_time >= starttime)\n | where (isnull(endtime) or end_time <= endtime);\n\n let newIncidentEvents = allEvents\n | where module == \"new-incident\"\n | extend d = data\n // --- Core ASIM fields ---\n | extend\n EventVendor = \"Bitdefender\",\n EventProduct = \"GravityZone\",\n EventSchema = \"AlertEvent\",\n EventSchemaVersion = 1,\n EventType = \"Alert\",\n EventStartTime = start_time,\n EventEndTime = end_time,\n EventUid = tostring(d.incident_id),\n EventSeverity = case(\n d.severity == \"low\", \"Low\",\n d.severity == \"medium\", \"Medium\",\n d.severity == \"high\", \"High\",\n \"Low\" // fallback value if null or unmatched\n ),\n EventCount = 1,\n DvcHostname = d.computer_name,\n DvcFQDN = d.computer_fqdn,\n DvcIpAddr = d.computer_ip,\n DvcId = d.endpointId,\n DvcAction = replace_string(tostring(d.main_action), \"_\", \" \")\n // --- Additional (packed) fields ---\n | extend 
AdditionalFields = bag_pack(\n \"SeverityScore\", d.severity_score,\n \"DetectionName\", d.detection_name,\n \"FileName\", d.file_name,\n \"FilePath\", d.file_path,\n \"FileHashMd5\", d.file_hash_md5,\n \"FileHashSha256\", d.file_hash_sha256,\n \"URL\", d.url,\n \"Port\", d.port,\n \"Protocol\", d.protocol,\n \"SourceIp\", d.source_ip,\n \"ProcessPid\", d.process_pid,\n \"ProcessPath\", d.process_path,\n \"ParentProcessPid\", d.parent_process_pid,\n \"ParentProcessPath\", d.parent_process_path,\n \"AttackTypes\", d.attack_types,\n \"AttCkId\", d.att_ck_id,\n \"ProcessCommandLine\", d.process_command_line,\n \"Username\", d.username,\n \"UserSid\", d.user_sid,\n \"CompanyId\", company_id,\n \"Module\", \"EDR\"\n )\n // Final ASIM projection (keep core + AdditionalFields)\n | project\n EventVendor,\n EventProduct,\n EventSchema,\n EventSchemaVersion,\n EventType,\n EventStartTime,\n EventEndTime,\n EventUid,\n EventSeverity,\n EventCount,\n DvcHostname,\n DvcFQDN,\n DvcIpAddr,\n DvcId,\n DvcAction,\n AdditionalFields;\n \n let newExtendedIncidentEvents = allEvents\n | where module == \"new-extended-incident\"\n | extend d = data\n // --- Core ASIM fields ---\n | extend\n EventVendor = \"Bitdefender\",\n EventProduct = \"GravityZone\",\n EventSchema = \"AlertEvent\",\n EventSchemaVersion = 1,\n EventType = \"Alert\",\n EventStartTime = start_time,\n EventEndTime = end_time,\n EventUid = tostring(d.incident_id),\n EventSeverity = case(\n d.severity == \"low\", \"Low\",\n d.severity == \"medium\", \"Medium\",\n d.severity == \"high\", \"High\",\n \"Low\" // fallback value if null or unmatched\n ),\n EventCount = 1,\n DvcAction = replace_string(tostring(d.main_action), \"_\", \" \")\n // --- Additional (packed) fields ---\n | extend AdditionalFields = bag_pack(\n \"SeverityScore\", d.severity_score,\n \"IncidentNumber\", d.incident_number,\n \"IncidentVersion\", d.version,\n \"KillchainPhases\", d.killchain_phases,\n \"LastKillchainPhase\", d.last_killchain_phase,\n 
\"AttackTypes\", d.attack_types,\n \"CorrelatedIncidentIds\", d.correlated_incidents,\n \"Nodes\", d.nodes,\n \"CompanyId\", company_id,\n \"Module\", \"XDR\"\n )\n // Final ASIM projection (keep core + AdditionalFields)\n | project\n EventVendor,\n EventProduct,\n EventSchema,\n EventSchemaVersion,\n EventType,\n EventStartTime,\n EventEndTime,\n EventUid,\n EventSeverity,\n EventCount,\n DvcAction,\n AdditionalFields;\n\n let ransomwareMitigationEvents = allEvents\n | where module == \"ransomware-mitigation\"\n | extend d = data\n // --- Core ASIM fields ---\n | extend\n EventVendor = \"Bitdefender\",\n EventProduct = \"GravityZone\",\n EventSchema = \"AlertEvent\",\n EventSchemaVersion = 1,\n EventType = \"Alert\",\n EventStartTime = start_time,\n EventEndTime = end_time,\n EventUid = _ItemId,\n EventSeverity = \"Informational\",\n EventCount = 1,\n DvcHostname = d.computer_name,\n DvcFQDN = d.computer_fqdn,\n DvcIpAddr = d.computer_ip,\n DvcId = d.endpointId,\n DvcAction = \"Blocked\"\n // --- Additional (packed) fields ---\n | extend AdditionalFields = bag_pack(\n \"CompanyId\", company_id,\n \"CompanyName\", d.company_name,\n \"AttackType\", d.attack_type, // custom, not MITRE value\n \"AttackTypes\", dynamic([]), // mandatory MITRE field\n \"ItemCount\", d.item_count,\n \"AttackSource\", d.attack_source,\n \"EndpointProduct\", d.product_installed,\n \"Module\", module\n )\n // Final ASIM projection (keep core + AdditionalFields)\n | project\n EventVendor,\n EventProduct,\n EventSchema,\n EventSchemaVersion,\n EventType,\n EventStartTime,\n EventEndTime,\n EventUid,\n EventSeverity,\n EventCount,\n DvcId,\n DvcHostname,\n DvcFQDN,\n DvcIpAddr,\n DvcAction,\n AdditionalFields;\n \n let networkSandboxingEvents = allEvents\n | where module == \"network-sandboxing\"\n | extend d = data\n // --- Compute required fields for downstream mapping ---\n | extend MaxRemediationAction = iff(array_length(d.remediationActions) > 0, 
todouble(array_sort_desc(d.remediationActions)[0]), double(3))\n // --- Core ASIM fields ---\n | extend\n EventVendor = \"Bitdefender\",\n EventProduct = \"GravityZone\",\n EventSchema = \"AlertEvent\",\n EventSchemaVersion = 1,\n EventType = \"Alert\",\n EventStartTime = start_time,\n EventEndTime = end_time,\n EventUid = tostring(d.submissionId),\n EventSeverity = case(\n MaxRemediationAction == 3, \"Low\", // report only\n MaxRemediationAction == 2, \"Medium\", // move\n \"High\"// fallback - disinfect or delete\n ),\n EventCount = 1,\n DvcHostname = d.computerName,\n DvcIpAddr = d.computerIp,\n DvcId = d.endpointId\n // --- Additional (packed) fields ---\n | extend AdditionalFields = bag_pack(\n \"CompanyId\", company_id,\n \"DeviceExternalId\", d.deviceExternalId,\n \"ThreatType\", d.threatType,\n \"FilePaths\", d.filePaths,\n \"FileSizes\", d.fileSizes,\n \"RemediationActions\", d.remediationActions,\n \"AttackTypes\", dynamic([]), // mandatory MITRE field\n \"Module\", module\n )\n // Final ASIM projection (keep core + AdditionalFields)\n | project\n EventVendor,\n EventProduct,\n EventSchema,\n EventSchemaVersion,\n EventType,\n EventStartTime,\n EventEndTime,\n EventUid,\n EventSeverity,\n EventCount,\n DvcId,\n DvcHostname,\n DvcIpAddr,\n AdditionalFields;\n \n \n let exchangeMalwareEvents = allEvents\n | where module == \"exchange-malware\"\n | extend d = data\n // --- Core ASIM fields ---\n | extend\n EventVendor = \"Bitdefender\",\n EventProduct = \"GravityZone\",\n EventSchema = \"AlertEvent\",\n EventSchemaVersion = 1,\n EventType = \"Alert\",\n EventStartTime = start_time,\n EventEndTime = end_time,\n EventUid = _ItemId,\n EventSeverity = \"Informational\",\n EventCount = 1,\n DvcHostname = d.computer_name,\n DvcFQDN = d.computer_fqdn,\n DvcIpAddr = d.computer_ip,\n DvcId = d.endpointId,\n DvcAction = \"Blocked\"\n // --- Additional (packed) fields ---\n | extend AdditionalFields = bag_pack(\n \"CompanyId\", company_id,\n \"Malware\", d.malware,\n 
\"Subject\", d.subject,\n \"Recipients\", d.recipients,\n \"Sender\", d.sender,\n \"ServerName\", d.serverName,\n \"EndpointProduct\", d.product_installed,\n \"AttackTypes\", dynamic([]), // mandatory MITRE field\n \"Module\", module\n )\n // Final ASIM projection (keep core + AdditionalFields)\n | project\n EventVendor,\n EventProduct,\n EventSchema,\n EventSchemaVersion,\n EventType,\n EventStartTime,\n EventEndTime,\n EventUid,\n EventSeverity,\n EventCount,\n DvcId,\n DvcHostname,\n DvcFQDN,\n DvcIpAddr,\n DvcAction,\n AdditionalFields;\n\n union newExtendedIncidentEvents, newIncidentEvents, ransomwareMitigationEvents, networkSandboxingEvents, exchangeMalwareEvents\n | where (array_length(ipaddr_has_any_prefix) == 0 or array_length(array_intersect(ipaddr_has_any_prefix, pack_array(tostring(DvcIpAddr)))) > 0)\n | where (array_length(hostname_has_any) == 0 or array_length(array_intersect(hostname_has_any, pack_array(tostring(DvcHostname)))) > 0)\n | where (array_length(attacktactics_has_any) == 0 or array_length(array_intersect(attacktactics_has_any, pack_array(tostring(AdditionalFields.AttackTypes)))) > 0)\n | where (array_length(alertverdict_has_any) == 0 or array_length(array_intersect(alertverdict_has_any, pack_array(tostring(DvcAction)))) > 0)\n | where (array_length(eventseverity_has_any) == 0 or array_length(array_intersect(eventseverity_has_any, pack_array(tostring(EventSeverity)))) > 0);\n\n};\nparser(\n starttime = starttime,\n endtime = endtime,\n ipaddr_has_any_prefix = ipaddr_has_any_prefix,\n hostname_has_any = hostname_has_any,\n username_has_any = username_has_any,\n attacktactics_has_any = attacktactics_has_any,\n attacktechniques_has_any = attacktechniques_has_any,\n threatcategory_has_any = threatcategory_has_any,\n alertverdict_has_any = alertverdict_has_any,\n eventseverity_has_any = eventseverity_has_any,\n disabled = disabled\n)\n", + "version": 1, + "functionParameters": 
"starttime:datetime=datetime(null),endtime:datetime=datetime(null),disabled:bool=False" + } + } + ] +} \ No newline at end of file diff --git a/Parsers/ASimAlertEvent/Parsers/ASimAlertEvent.yaml b/Parsers/ASimAlertEvent/Parsers/ASimAlertEvent.yaml index c2a89f06e56..2398b8e3508 100644 --- a/Parsers/ASimAlertEvent/Parsers/ASimAlertEvent.yaml +++ b/Parsers/ASimAlertEvent/Parsers/ASimAlertEvent.yaml @@ -18,6 +18,7 @@ ParserName: ASimAlertEvent EquivalentBuiltInParser: _ASim_AlertEvent Parsers: - _Im_AlertEvent_Empty + - _ASim_AlertEvent_BitdefenderGravityZone - _ASim_AlertEvent_MicrosoftDefenderXDR - _ASim_AlertEvent_SentinelOneSingularity ParserParams: @@ -30,6 +31,7 @@ ParserQuery: | let parser=(pack:bool=false){ union isfuzzy=true vimAlertEventEmpty, + ASimAlertEventBitdefenderGravityZone (disabled=(ASimBuiltInDisabled or ('ExcludeASimAlertEventBitdefenderGravityZone' in (DisabledParsers)))), ASimAlertEventMicrosoftDefenderXDR (disabled=(ASimBuiltInDisabled or ('ExcludeASimAlertEventMicrosoftDefenderXDR' in (DisabledParsers)))), ASimAlertEventSentinelOneSingularity (disabled=(ASimBuiltInDisabled or ('ExcludeASimAlertEventSentinelOneSingularity' in (DisabledParsers)))) }; diff --git a/Parsers/ASimAlertEvent/Parsers/ASimAlertEventBitdefenderGravityZone.yaml b/Parsers/ASimAlertEvent/Parsers/ASimAlertEventBitdefenderGravityZone.yaml new file mode 100644 index 00000000000..661dbff5886 --- /dev/null +++ b/Parsers/ASimAlertEvent/Parsers/ASimAlertEventBitdefenderGravityZone.yaml @@ -0,0 +1,300 @@ +Parser: + Title: Alert event ASIM parser for Bitdefender GravityZone + Version: '0.1.0' + LastUpdated: Sep 15, 2025 +Product: + Name: Bitdefender GravityZone +Normalization: + Schema: AlertEvent + Version: '0.1' +References: + - Title: ASIM Alert Schema + Link: https://aka.ms/ASimAlertEventDoc + - Title: ASIM + Link: https://aka.ms/AboutASIM +Description: | + This ASIM parser supports normalizing the Bitdefender GravityZone logs to the ASIM Alert normalized schema. 
+ParserName: ASimAlertEventBitdefenderGravityZone +EquivalentBuiltInParser: _ASim_AlertEvent_BitdefenderGravityZone +ParserParams: + - Name: disabled + Type: bool + Default: false +ParserQuery: | + let parser=(disabled:bool=false){ + let allEvents = GzSecurityEvents_CL + | where not(disabled); + + let newIncidentEvents = allEvents + | where module == "new-incident" + | extend d = data + // --- Core ASIM fields --- + | extend + EventVendor = "Bitdefender", + EventProduct = "GravityZone", + EventSchema = "AlertEvent", + EventSchemaVersion = 1, + EventType = "Alert", + EventStartTime = start_time, + EventEndTime = end_time, + EventUid = tostring(d.incident_id), + EventSeverity = case( + d.severity == "low", "Low", + d.severity == "medium", "Medium", + d.severity == "high", "High", + "Low" // fallback value if null or unmatched + ), + EventCount = 1, + DvcHostname = d.computer_name, + DvcFQDN = d.computer_fqdn, + DvcIpAddr = d.computer_ip, + DvcId = d.endpointId, + DvcAction = replace_string(tostring(d.main_action), "_", " ") + // --- Additional (packed) fields --- + | extend AdditionalFields = bag_pack( + "SeverityScore", d.severity_score, + "DetectionName", d.detection_name, + "FileName", d.file_name, + "FilePath", d.file_path, + "FileHashMd5", d.file_hash_md5, + "FileHashSha256", d.file_hash_sha256, + "URL", d.url, + "Port", d.port, + "Protocol", d.protocol, + "SourceIp", d.source_ip, + "ProcessPid", d.process_pid, + "ProcessPath", d.process_path, + "ParentProcessPid", d.parent_process_pid, + "ParentProcessPath", d.parent_process_path, + "AttackTypes", d.attack_types, + "AttCkId", d.att_ck_id, + "ProcessCommandLine", d.process_command_line, + "Username", d.username, + "UserSid", d.user_sid, + "CompanyId", company_id, + "Module", module + ) + // Final ASIM projection (keep core + AdditionalFields) + | project + EventVendor, + EventProduct, + EventSchema, + EventSchemaVersion, + EventType, + EventStartTime, + EventEndTime, + EventUid, + EventSeverity, + EventCount, + 
DvcHostname, + DvcFQDN, + DvcIpAddr, + DvcId, + DvcAction, + AdditionalFields; + + let newExtendedIncidentEvents = allEvents + | where module == "new-extended-incident" + | extend d = data + // --- Core ASIM fields --- + | extend + EventVendor = "Bitdefender", + EventProduct = "GravityZone", + EventSchema = "AlertEvent", + EventSchemaVersion = 1, + EventType = "Alert", + EventStartTime = start_time, + EventEndTime = end_time, + EventUid = tostring(d.incident_id), + EventSeverity = case( + d.severity == "low", "Low", + d.severity == "medium", "Medium", + d.severity == "high", "High", + "Low" // fallback value if null or unmatched + ), + EventCount = 1, + DvcAction = replace_string(tostring(d.main_action), "_", " ") + // --- Additional (packed) fields --- + | extend AdditionalFields = bag_pack( + "SeverityScore", d.severity_score, + "IncidentNumber", d.incident_number, + "IncidentVersion", d.version, + "KillchainPhases", d.killchain_phases, + "LastKillchainPhase", d.last_killchain_phase, + "AttackTypes", d.attack_types, + "CorrelatedIncidentIds", d.correlated_incidents, + "Nodes", d.nodes, + "CompanyId", company_id, + "Module", module + ) + // Final ASIM projection (keep core + AdditionalFields) + | project + EventVendor, + EventProduct, + EventSchema, + EventSchemaVersion, + EventType, + EventStartTime, + EventEndTime, + EventUid, + EventSeverity, + EventCount, + DvcAction, + AdditionalFields; + + let ransomwareMitigationEvents = allEvents + | where module == "ransomware-mitigation" + | extend d = data + // --- Core ASIM fields --- + | extend + EventVendor = "Bitdefender", + EventProduct = "GravityZone", + EventSchema = "AlertEvent", + EventSchemaVersion = 1, + EventType = "Alert", + EventStartTime = start_time, + EventEndTime = end_time, + EventUid = _ItemId, + EventSeverity = "Informational", + EventCount = 1, + DvcHostname = d.computer_name, + DvcFQDN = d.computer_fqdn, + DvcIpAddr = d.computer_ip, + DvcId = d.endpointId, + DvcAction = "Blocked" + // --- 
Additional (packed) fields --- + | extend AdditionalFields = bag_pack( + "CompanyId", company_id, + "CompanyName", d.company_name, + "AttackType", d.attack_type, // custom, not MITRE value + "AttackTypes", dynamic([]), // mandatory MITRE field + "ItemCount", d.item_count, + "AttackSource", d.attack_source, + "EndpointProduct", d.product_installed, + "Module", module + ) + // Final ASIM projection (keep core + AdditionalFields) + | project + EventVendor, + EventProduct, + EventSchema, + EventSchemaVersion, + EventType, + EventStartTime, + EventEndTime, + EventUid, + EventSeverity, + EventCount, + DvcId, + DvcHostname, + DvcFQDN, + DvcIpAddr, + DvcAction, + AdditionalFields; + + let networkSandboxingEvents = allEvents + | where module == "network-sandboxing" + | extend d = data + // --- Compute required fields for downstream mapping --- + | extend MaxRemediationAction = iff(array_length(d.remediationActions) > 0, todouble(array_sort_desc(d.remediationActions)[0]), double(3)) + // --- Core ASIM fields --- + | extend + EventVendor = "Bitdefender", + EventProduct = "GravityZone", + EventSchema = "AlertEvent", + EventSchemaVersion = 1, + EventType = "Alert", + EventStartTime = start_time, + EventEndTime = end_time, + EventUid = tostring(d.submissionId), + + EventSeverity = case( + MaxRemediationAction == 3, "Low", // report only + MaxRemediationAction == 2, "Medium", // move + "High"// fallback - disinfect or delete + ), + EventCount = 1, + DvcHostname = d.computerName, + DvcIpAddr = d.computerIp, + DvcId = d.endpointId + // --- Additional (packed) fields --- + | extend AdditionalFields = bag_pack( + "CompanyId", company_id, + "DeviceExternalId", d.deviceExternalId, + "ThreatType", d.threatType, + "FilePaths", d.filePaths, + "FileSizes", d.fileSizes, + "RemediationActions", d.remediationActions, + "Module", module + ) + // Final ASIM projection (keep core + AdditionalFields) + | project + EventVendor, + EventProduct, + EventSchema, + EventSchemaVersion, + EventType, + 
EventStartTime, + EventEndTime, + EventUid, + EventSeverity, + EventCount, + DvcId, + DvcHostname, + DvcIpAddr, + AdditionalFields; + + + let exchangeMalwareEvents = allEvents + | where module == "exchange-malware" + | extend d = data + // --- Core ASIM fields --- + | extend + EventVendor = "Bitdefender", + EventProduct = "GravityZone", + EventSchema = "AlertEvent", + EventSchemaVersion = 1, + EventType = "Alert", + EventStartTime = start_time, + EventEndTime = end_time, + EventUid = _ItemId, + EventSeverity = "Informational", + EventCount = 1, + DvcHostname = d.computer_name, + DvcFQDN = d.computer_fqdn, + DvcIpAddr = d.computer_ip, + DvcId = d.endpointId, + DvcAction = "Blocked" + // --- Additional (packed) fields --- + | extend AdditionalFields = bag_pack( + "CompanyId", company_id, + "Malware", d.malware, + "Subject", d.subject, + "Recipients", d.recipients, + "Sender", d.sender, + "ServerName", d.serverName, + "EndpointProduct", d.product_installed, + "Module", module + ) + // Final ASIM projection (keep core + AdditionalFields) + | project + EventVendor, + EventProduct, + EventSchema, + EventSchemaVersion, + EventType, + EventStartTime, + EventEndTime, + EventUid, + EventSeverity, + EventCount, + DvcId, + DvcHostname, + DvcFQDN, + DvcIpAddr, + DvcAction, + AdditionalFields; + + union newExtendedIncidentEvents, newIncidentEvents, ransomwareMitigationEvents, networkSandboxingEvents, exchangeMalwareEvents + }; + parser(disabled=disabled) diff --git a/Parsers/ASimAlertEvent/Parsers/imAlertEvent.yaml b/Parsers/ASimAlertEvent/Parsers/imAlertEvent.yaml index 7bd90619955..4d493ea584e 100644 --- a/Parsers/ASimAlertEvent/Parsers/imAlertEvent.yaml +++ b/Parsers/ASimAlertEvent/Parsers/imAlertEvent.yaml @@ -18,6 +18,7 @@ ParserName: imAlertEvent EquivalentBuiltInParser: _Im_AlertEvent Parsers: - _Im_AlertEvent_Empty + - _Im_AlertEvent_BitdefenderGravityZone - _Im_AlertEvent_MicrosoftDefenderXDR - _Im_AlertEvent_SentinelOneSingularity ParserParams: @@ -72,6 +73,7 @@ 
ParserQuery: | { union isfuzzy=true vimAlertEventEmpty, + vimAlertEventBitdefenderGravityZone (starttime=starttime, endtime=endtime, ipaddr_has_any_prefix=ipaddr_has_any_prefix, hostname_has_any=hostname_has_any, username_has_any=username_has_any, attacktactics_has_any=attacktactics_has_any, attacktechniques_has_any=attacktechniques_has_any, threatcategory_has_any=threatcategory_has_any, alertverdict_has_any=alertverdict_has_any, eventseverity_has_any=eventseverity_has_any, disabled=(vimBuiltInDisabled or ('ExcludevimAlertBitdefenderGravityZone' in (DisabledParsers)))), vimAlertEventMicrosoftDefenderXDR (starttime=starttime, endtime=endtime, ipaddr_has_any_prefix=ipaddr_has_any_prefix, hostname_has_any=hostname_has_any, username_has_any=username_has_any, attacktactics_has_any=attacktactics_has_any, attacktechniques_has_any=attacktechniques_has_any, threatcategory_has_any=threatcategory_has_any, alertverdict_has_any=alertverdict_has_any, eventseverity_has_any=eventseverity_has_any, disabled=(vimBuiltInDisabled or ('ExcludevimAlertMicrosoftDefenderXDR' in (DisabledParsers)))), vimAlertEventSentinelOneSingularity (starttime=starttime, endtime=endtime, ipaddr_has_any_prefix=ipaddr_has_any_prefix, hostname_has_any=hostname_has_any, username_has_any=username_has_any, attacktactics_has_any=attacktactics_has_any, attacktechniques_has_any=attacktechniques_has_any, threatcategory_has_any=threatcategory_has_any, alertverdict_has_any=alertverdict_has_any, eventseverity_has_any=eventseverity_has_any, disabled=(vimBuiltInDisabled or ('ExcludevimAlertSentinelOneSingularity' in (DisabledParsers)))) }; diff --git a/Parsers/ASimAlertEvent/Parsers/vimAlertEventBitdefenderGravityZone.yaml b/Parsers/ASimAlertEvent/Parsers/vimAlertEventBitdefenderGravityZone.yaml new file mode 100644 index 00000000000..0af4c991de6 --- /dev/null +++ b/Parsers/ASimAlertEvent/Parsers/vimAlertEventBitdefenderGravityZone.yaml @@ -0,0 +1,341 @@ +Parser: + Title: Alert Event ASIM filtering parser for 
Bitdefender GravityZone + Version: '0.1.0' + LastUpdated: Sep 17, 2025 +Product: + Name: Bitdefender GravityZone +Normalization: + Schema: AlertEvent + Version: '0.1' +References: + - Title: ASIM Alert Schema + Link: https://aka.ms/ASimAlertEventDoc + - Title: ASIM + Link: https://aka.ms/AboutASIM +Description: | + This ASIM parser supports normalizing and filtering the Bitdefender GravityZone logs to the ASIM Alert normalized schema. +ParserName: vimAlertEventBitdefenderGravityZone +EquivalentBuiltInParser: _Im_AlertEvent_BitdefenderGravityZone +ParserParams: + - Name: starttime + Type: datetime + Default: datetime(null) + - Name: endtime + Type: datetime + Default: datetime(null) + - Name: disabled + Type: bool + Default: false +ParserQuery: | + let parser = ( + starttime: datetime=datetime(null), + endtime: datetime=datetime(null), + ipaddr_has_any_prefix: dynamic=dynamic([]), + hostname_has_any: dynamic=dynamic([]), + username_has_any: dynamic=dynamic([]), + attacktactics_has_any: dynamic=dynamic([]), + attacktechniques_has_any: dynamic=dynamic([]), + threatcategory_has_any: dynamic=dynamic([]), + alertverdict_has_any: dynamic=dynamic([]), + eventseverity_has_any: dynamic=dynamic([]), + disabled: bool=false) { + + // Filtering based on the parameters above + + let allEvents = GzSecurityEvents_CL + | where not(disabled) + | where (isnull(starttime) or start_time >= starttime) + | where (isnull(endtime) or end_time <= endtime); + + let newIncidentEvents = allEvents + | where module == "new-incident" + | extend d = data + // --- Core ASIM fields --- + | extend + EventVendor = "Bitdefender", + EventProduct = "GravityZone", + EventSchema = "AlertEvent", + EventSchemaVersion = 1, + EventType = "Alert", + EventStartTime = start_time, + EventEndTime = end_time, + EventUid = tostring(d.incident_id), + EventSeverity = case( + d.severity == "low", "Low", + d.severity == "medium", "Medium", + d.severity == "high", "High", + "Low" // fallback value if null or unmatched + ), 
+ EventCount = 1, + DvcHostname = d.computer_name, + DvcFQDN = d.computer_fqdn, + DvcIpAddr = d.computer_ip, + DvcId = d.endpointId, + DvcAction = replace_string(tostring(d.main_action), "_", " ") + // --- Additional (packed) fields --- + | extend AdditionalFields = bag_pack( + "SeverityScore", d.severity_score, + "DetectionName", d.detection_name, + "FileName", d.file_name, + "FilePath", d.file_path, + "FileHashMd5", d.file_hash_md5, + "FileHashSha256", d.file_hash_sha256, + "URL", d.url, + "Port", d.port, + "Protocol", d.protocol, + "SourceIp", d.source_ip, + "ProcessPid", d.process_pid, + "ProcessPath", d.process_path, + "ParentProcessPid", d.parent_process_pid, + "ParentProcessPath", d.parent_process_path, + "AttackTypes", d.attack_types, + "AttCkId", d.att_ck_id, + "ProcessCommandLine", d.process_command_line, + "Username", d.username, + "UserSid", d.user_sid, + "CompanyId", company_id, + "Module", "EDR" + ) + // Final ASIM projection (keep core + AdditionalFields) + | project + EventVendor, + EventProduct, + EventSchema, + EventSchemaVersion, + EventType, + EventStartTime, + EventEndTime, + EventUid, + EventSeverity, + EventCount, + DvcHostname, + DvcFQDN, + DvcIpAddr, + DvcId, + DvcAction, + AdditionalFields; + + let newExtendedIncidentEvents = allEvents + | where module == "new-extended-incident" + | extend d = data + // --- Core ASIM fields --- + | extend + EventVendor = "Bitdefender", + EventProduct = "GravityZone", + EventSchema = "AlertEvent", + EventSchemaVersion = 1, + EventType = "Alert", + EventStartTime = start_time, + EventEndTime = end_time, + EventUid = tostring(d.incident_id), + EventSeverity = case( + d.severity == "low", "Low", + d.severity == "medium", "Medium", + d.severity == "high", "High", + "Low" // fallback value if null or unmatched + ), + EventCount = 1, + DvcAction = replace_string(tostring(d.main_action), "_", " ") + // --- Additional (packed) fields --- + | extend AdditionalFields = bag_pack( + "SeverityScore", d.severity_score, + 
"IncidentNumber", d.incident_number, + "IncidentVersion", d.version, + "KillchainPhases", d.killchain_phases, + "LastKillchainPhase", d.last_killchain_phase, + "AttackTypes", d.attack_types, + "CorrelatedIncidentIds", d.correlated_incidents, + "Nodes", d.nodes, + "CompanyId", company_id, + "Module", "XDR" + ) + // Final ASIM projection (keep core + AdditionalFields) + | project + EventVendor, + EventProduct, + EventSchema, + EventSchemaVersion, + EventType, + EventStartTime, + EventEndTime, + EventUid, + EventSeverity, + EventCount, + DvcAction, + AdditionalFields; + + let ransomwareMitigationEvents = allEvents + | where module == "ransomware-mitigation" + | extend d = data + // --- Core ASIM fields --- + | extend + EventVendor = "Bitdefender", + EventProduct = "GravityZone", + EventSchema = "AlertEvent", + EventSchemaVersion = 1, + EventType = "Alert", + EventStartTime = start_time, + EventEndTime = end_time, + EventUid = _ItemId, + EventSeverity = "Informational", + EventCount = 1, + DvcHostname = d.computer_name, + DvcFQDN = d.computer_fqdn, + DvcIpAddr = d.computer_ip, + DvcId = d.endpointId, + DvcAction = "Blocked" + // --- Additional (packed) fields --- + | extend AdditionalFields = bag_pack( + "CompanyId", company_id, + "CompanyName", d.company_name, + "AttackType", d.attack_type, // custom, not MITRE value + "AttackTypes", dynamic([]), // mandatory MITRE field + "ItemCount", d.item_count, + "AttackSource", d.attack_source, + "EndpointProduct", d.product_installed, + "Module", module + ) + // Final ASIM projection (keep core + AdditionalFields) + | project + EventVendor, + EventProduct, + EventSchema, + EventSchemaVersion, + EventType, + EventStartTime, + EventEndTime, + EventUid, + EventSeverity, + EventCount, + DvcId, + DvcHostname, + DvcFQDN, + DvcIpAddr, + DvcAction, + AdditionalFields; + + let networkSandboxingEvents = allEvents + | where module == "network-sandboxing" + | extend d = data + // --- Compute required fields for downstream mapping --- + | 
extend MaxRemediationAction = iff(array_length(d.remediationActions) > 0, todouble(array_sort_desc(d.remediationActions)[0]), double(3)) + // --- Core ASIM fields --- + | extend + EventVendor = "Bitdefender", + EventProduct = "GravityZone", + EventSchema = "AlertEvent", + EventSchemaVersion = 1, + EventType = "Alert", + EventStartTime = start_time, + EventEndTime = end_time, + EventUid = tostring(d.submissionId), + EventSeverity = case( + MaxRemediationAction == 3, "Low", // report only + MaxRemediationAction == 2, "Medium", // move + "High"// fallback - disinfect or delete + ), + EventCount = 1, + DvcHostname = d.computerName, + DvcIpAddr = d.computerIp, + DvcId = d.endpointId + // --- Additional (packed) fields --- + | extend AdditionalFields = bag_pack( + "CompanyId", company_id, + "DeviceExternalId", d.deviceExternalId, + "ThreatType", d.threatType, + "FilePaths", d.filePaths, + "FileSizes", d.fileSizes, + "RemediationActions", d.remediationActions, + "AttackTypes", dynamic([]), // mandatory MITRE field + "Module", module + ) + // Final ASIM projection (keep core + AdditionalFields) + | project + EventVendor, + EventProduct, + EventSchema, + EventSchemaVersion, + EventType, + EventStartTime, + EventEndTime, + EventUid, + EventSeverity, + EventCount, + DvcId, + DvcHostname, + DvcIpAddr, + AdditionalFields; + + + let exchangeMalwareEvents = allEvents + | where module == "exchange-malware" + | extend d = data + // --- Core ASIM fields --- + | extend + EventVendor = "Bitdefender", + EventProduct = "GravityZone", + EventSchema = "AlertEvent", + EventSchemaVersion = 1, + EventType = "Alert", + EventStartTime = start_time, + EventEndTime = end_time, + EventUid = _ItemId, + EventSeverity = "Informational", + EventCount = 1, + DvcHostname = d.computer_name, + DvcFQDN = d.computer_fqdn, + DvcIpAddr = d.computer_ip, + DvcId = d.endpointId, + DvcAction = "Blocked" + // --- Additional (packed) fields --- + | extend AdditionalFields = bag_pack( + "CompanyId", company_id, + 
"Malware", d.malware, + "Subject", d.subject, + "Recipients", d.recipients, + "Sender", d.sender, + "ServerName", d.serverName, + "EndpointProduct", d.product_installed, + "AttackTypes", dynamic([]), // mandatory MITRE field + "Module", module + ) + // Final ASIM projection (keep core + AdditionalFields) + | project + EventVendor, + EventProduct, + EventSchema, + EventSchemaVersion, + EventType, + EventStartTime, + EventEndTime, + EventUid, + EventSeverity, + EventCount, + DvcId, + DvcHostname, + DvcFQDN, + DvcIpAddr, + DvcAction, + AdditionalFields; + + union newExtendedIncidentEvents, newIncidentEvents, ransomwareMitigationEvents, networkSandboxingEvents, exchangeMalwareEvents + | where (array_length(ipaddr_has_any_prefix) == 0 or array_length(array_intersect(ipaddr_has_any_prefix, pack_array(tostring(DvcIpAddr)))) > 0) + | where (array_length(hostname_has_any) == 0 or array_length(array_intersect(hostname_has_any, pack_array(tostring(DvcHostname)))) > 0) + | where (array_length(attacktactics_has_any) == 0 or array_length(array_intersect(attacktactics_has_any, pack_array(tostring(AdditionalFields.AttackTypes)))) > 0) + | where (array_length(alertverdict_has_any) == 0 or array_length(array_intersect(alertverdict_has_any, pack_array(tostring(DvcAction)))) > 0) + | where (array_length(eventseverity_has_any) == 0 or array_length(array_intersect(eventseverity_has_any, pack_array(tostring(EventSeverity)))) > 0); + + }; + parser( + starttime = starttime, + endtime = endtime, + ipaddr_has_any_prefix = ipaddr_has_any_prefix, + hostname_has_any = hostname_has_any, + username_has_any = username_has_any, + attacktactics_has_any = attacktactics_has_any, + attacktechniques_has_any = attacktechniques_has_any, + threatcategory_has_any = threatcategory_has_any, + alertverdict_has_any = alertverdict_has_any, + eventseverity_has_any = eventseverity_has_any, + disabled = disabled + ) diff --git a/Parsers/ASimWebSession/ARM/FullDeploymentWebSession.json 
b/Parsers/ASimWebSession/ARM/FullDeploymentWebSession.json index 26e848da3cc..f8f9fa99bd6 100644 --- a/Parsers/ASimWebSession/ARM/FullDeploymentWebSession.json +++ b/Parsers/ASimWebSession/ARM/FullDeploymentWebSession.json @@ -365,7 +365,7 @@ "properties": { "mode": "Incremental", "templateLink": { - "uri": "https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/ASimWebSession/ARM/ASimWebSessionAzureFirewall/ASimWebSessionAzureFirewall.json", + "uri": "https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/ASimWebSession/ARM/AsimWebSessionAzureFirewall/ASimWebSessionAzureFirewall.json", "contentVersion": "1.0.0.0" }, "parameters": { diff --git a/Sample Data/ASIM/Bitdefender_GravityZone_ASimAlertEvent_RawLogs.json b/Sample Data/ASIM/Bitdefender_GravityZone_ASimAlertEvent_RawLogs.json new file mode 100644 index 00000000000..9551fb8ff27 --- /dev/null +++ b/Sample Data/ASIM/Bitdefender_GravityZone_ASimAlertEvent_RawLogs.json @@ -0,0 +1,724 @@ +[ + { + "companyId": "5efb3a520075db7384dfa290", + "module": "new-incident", + "start_time": "2025-12-10T15:40:38.431Z", + "end_time": "2025-12-10T15:40:38.431Z", + "data": { + "created": "2020-07-20T09:36:23.485Z", + "computer_id": "5efb3a520075db7384dfa286", + "computer_fqdn": "desktop-jac14gs", + "computer_name": "DESKTOP-JAC14GS", + "detection_name": "ATC.Malicious", + "attack_types": [ + "Other" + ], + "computer_ip": "10.17.23.30", + "severityScore": 90, + "incident_id": "5f1557cbe7b2584f3959ee19", + "attack_entry": 1688239188, + "parent_process_path": "c:\\windows\\system32\\cmd.exe", + "parent_process_pid": 9636, + "process_path": "c:\\users\\bdadmin\\desktop\\atcsim\\atcsim32.exe", + "process_pid": 10324, + "username": "DESKTOP-JAC14GS\\bdadmin", + "user_sid": "S-1-5-21-3349207704-443292085-2237656896-1003", + "process_command_line": "detect", + "file_hash_md5": "ccb1b07bdf330627f02b3c832663a489", + "file_hash_sha256": "d5adc6a65a57d30d3ae70d195983d155e7cd24f26eb1ebebde9b92655251ec55", + 
"att_ck_id": [ + "T1036", + "T1059", + "T1002", + "T1012" + ], + "severity": "high", + "main_action": "no action", + "endpointId": "5efb3a520075db7384dfa285", + "nodes": [ + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "runme.exe", + "type": "process_execution", + "details": { + "file": { + "name": null, + "path": "c:\\users\\bdvm\\desktop\\edr win samples\\ctc sample\\runme.exe", + "md5": "b5f9240a49fcc6be5de168c5cbbff59a", + "sha256": "8407fe2c7da0141f111806ec5d3453d92099b75070b0ff829f2efcc38100794d" + }, + "process": { + "pid": 6368, + "parent": { + "pid": 7036, + "name": "explorer.exe", + "path": null + }, + "command_line": "", + "user_sid": "S-1-5-21-3349207704-443292085-2237656896-1003", + "user_name": "LEV-EDR5\\BDVM", + "execution_date": "2020-11-26T09:07:47.000Z", + "name": null + } + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1c", + "name": "desktop", + "type": "file", + "details": { + "name": null, + "path": "c:\\users\\bdvm\\desktop", + "md5": null, + "sha256": null + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1c", + "type": "registry", + "details": { + "registry": { + "key": "", + "value": "", + "data": "" + }, + "process": { + "id": 0, + "name": "", + "path": "" + } + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1c", + "type": "domain", + "details": { + "domain": { + "requested_url": "", + "remote_port": "", + "source_application": "" + }, + "process": { + "id": 0, + "name": "", + "path": "" + }, + "file": { + "md5": "", + "sha256": "" + } + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1c", + "type": "container_or_container_host", + "details": { + "name": "", + "hardware_id": "", + "ip": "", + "container": true, + "container_host": false + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1c", + "type": "endpoint", + "details": { + "name": "", + "hardware_id": "", + "ip": "", + "container": true, + "container_host": false + }, + "total_alerts": 0 + } + ] + } + 
}, + { + "companyId": "5efb3a520075db7384dfa290", + "module": "new-incident", + "start_time": "2025-12-10T15:40:38.432Z", + "end_time": "2025-12-10T15:40:38.432Z", + "data": { + "created": "2020-07-20T09:36:23.485Z", + "computer_id": "5efb3a520075db7384dfa286", + "computer_fqdn": "desktop-jac14gs", + "computer_name": "DESKTOP-JAC14GS", + "detection_name": "ATC.Malicious", + "attack_types": [ + "Other" + ], + "computer_ip": "10.17.23.30", + "severityScore": 90, + "incident_id": "5f1557cbe7b2584f3959ee19", + "attack_entry": 1688239188, + "parent_process_path": "c:\\windows\\system32\\cmd.exe", + "parent_process_pid": 9636, + "process_path": "c:\\users\\bdadmin\\desktop\\atcsim\\atcsim32.exe", + "process_pid": 10324, + "username": "DESKTOP-JAC14GS\\bdadmin", + "user_sid": "S-1-5-21-3349207704-443292085-2237656896-1003", + "process_command_line": "detect", + "file_hash_md5": "ccb1b07bdf330627f02b3c832663a489", + "file_hash_sha256": "d5adc6a65a57d30d3ae70d195983d155e7cd24f26eb1ebebde9b92655251ec55", + "att_ck_id": [ + "T1036", + "T1059", + "T1002", + "T1012" + ], + "severity": "high", + "main_action": "no action", + "endpointId": "5efb3a520075db7384dfa285", + "nodes": [ + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "runme.exe", + "type": "process_execution", + "details": { + "file": { + "name": null, + "path": "c:\\users\\bdvm\\desktop\\edr win samples\\ctc sample\\runme.exe", + "md5": "b5f9240a49fcc6be5de168c5cbbff59a", + "sha256": "8407fe2c7da0141f111806ec5d3453d92099b75070b0ff829f2efcc38100794d" + }, + "process": { + "pid": 6368, + "parent": { + "pid": 7036, + "name": "explorer.exe", + "path": null + }, + "command_line": "", + "user_sid": "S-1-5-21-3349207704-443292085-2237656896-1003", + "user_name": "LEV-EDR5\\BDVM", + "execution_date": "2020-11-26T09:07:47.000Z", + "name": null + } + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1c", + "name": "desktop", + "type": "file", + "details": { + "name": null, + "path": "c:\\users\\bdvm\\desktop", + 
"md5": null, + "sha256": null + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1c", + "type": "registry", + "details": { + "registry": { + "key": "", + "value": "", + "data": "" + }, + "process": { + "id": 0, + "name": "", + "path": "" + } + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1c", + "type": "domain", + "details": { + "domain": { + "requested_url": "", + "remote_port": "", + "source_application": "" + }, + "process": { + "id": 0, + "name": "", + "path": "" + }, + "file": { + "md5": "", + "sha256": "" + } + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1c", + "type": "container_or_container_host", + "details": { + "name": "", + "hardware_id": "", + "ip": "", + "container": true, + "container_host": false + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1c", + "type": "endpoint", + "details": { + "name": "", + "hardware_id": "", + "ip": "", + "container": true, + "container_host": false + }, + "total_alerts": 0 + } + ] + } + }, + { + "companyId": "5efb3a520075db7384dfa290", + "module": "new-extended-incident", + "start_time": "2025-12-10T15:40:38.432Z", + "end_time": "2025-12-10T15:40:38.432Z", + "data": { + "created": "2020-07-20T09:36:23.485Z", + "last_updated": "2020-07-20T09:36:23.485Z", + "last_processed": "2020-07-20T09:36:23.485Z", + "incident_id": "5f1557cbe7b2584f3959ee19", + "incident_number": 1, + "version": 1, + "severity_score": 100, + "severity": "high", + "main_action": "action_needed", + "killchain_phases": [ + "initial_access", + "execution", + "c&c" + ], + "last_killchain_phase": "execution", + "attack_types": [ + "exploit", + "spearphishing", + "exfiltration" + ], + "correlated_incidents": [ + "5f1557cbe7b2584f3959ee17", + "5f1557cbe7b2584f3959ee18" + ], + "nodes": [ + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "", + "type": "endpoint", + "details": { + "hardware_id": "", + "ips": [ + "127.0.0.1", + "192.168.1.1" + ], + "macs": [ + "" + ], + "computer_id": "" + }, + 
"total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "", + "type": "server", + "details": { + "hardware_id": "", + "ips": [ + "127.0.0.1", + "192.168.1.1" + ], + "macs": [ + "" + ], + "computer_id": "", + "network_services": [ + "file_sharing", + "mail", + "dc_controller" + ] + }, + "total_alerts": 0 + }, + { + "id": "67", + "name": "", + "type": "mobile_device", + "details": { + "device_id": "", + "ip": "127.0.0.1", + "operating_system": "", + "device_group_name": "", + "phone_number": "" + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "", + "type": "router", + "details": { + "ip": "127.0.0.1", + "mac": "" + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "", + "type": "printer", + "details": { + "ip": "127.0.0.1", + "mac": "" + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "", + "type": "iot", + "details": { + "ip": "127.0.0.1", + "mac": "" + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "", + "type": "attacker", + "details": { + "threat_group": "Lazarus" + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "", + "type": "email", + "details": { + "sender": "", + "recipients": [ + "" + ], + "subject": "", + "attachments": [ + "" + ] + }, + "total_alerts": 0 + } + ] + } + }, + { + "companyId": "5efb3a520075db7384dfa290", + "module": "new-extended-incident", + "start_time": "2025-12-10T15:40:38.432Z", + "end_time": "2025-12-10T15:40:38.432Z", + "data": { + "created": "2020-07-20T09:36:23.485Z", + "last_updated": "2020-07-20T09:36:23.485Z", + "last_processed": "2020-07-20T09:36:23.485Z", + "incident_id": "5f1557cbe7b2584f3959ee19", + "incident_number": 1, + "version": 1, + "severity_score": 100, + "severity": "high", + "main_action": "action_needed", + "killchain_phases": [ + "initial_access", + "execution", + "c&c" + ], + "last_killchain_phase": "execution", + "attack_types": [ + "exploit", + 
"spearphishing", + "exfiltration" + ], + "correlated_incidents": [ + "5f1557cbe7b2584f3959ee17", + "5f1557cbe7b2584f3959ee18" + ], + "nodes": [ + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "", + "type": "endpoint", + "details": { + "hardware_id": "", + "ips": [ + "127.0.0.1", + "192.168.1.1" + ], + "macs": [ + "" + ], + "computer_id": "" + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "", + "type": "server", + "details": { + "hardware_id": "", + "ips": [ + "127.0.0.1", + "192.168.1.1" + ], + "macs": [ + "" + ], + "computer_id": "", + "network_services": [ + "file_sharing", + "mail", + "dc_controller" + ] + }, + "total_alerts": 0 + }, + { + "id": "67", + "name": "", + "type": "mobile_device", + "details": { + "device_id": "", + "ip": "127.0.0.1", + "operating_system": "", + "device_group_name": "", + "phone_number": "" + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "", + "type": "router", + "details": { + "ip": "127.0.0.1", + "mac": "" + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "", + "type": "printer", + "details": { + "ip": "127.0.0.1", + "mac": "" + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "", + "type": "iot", + "details": { + "ip": "127.0.0.1", + "mac": "" + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "", + "type": "attacker", + "details": { + "threat_group": "Lazarus" + }, + "total_alerts": 0 + }, + { + "id": "67dd30dd4a842ebbbb0b6b1b", + "name": "", + "type": "email", + "details": { + "sender": "", + "recipients": [ + "" + ], + "subject": "", + "attachments": [ + "" + ] + }, + "total_alerts": 0 + } + ] + } + }, + { + "companyId": "6e6b3a520075db7384dfa276", + "module": "ransomware-mitigation", + "start_time": "2025-12-10T15:40:38.432Z", + "end_time": "2025-12-10T15:40:38.432Z", + "data": { + "product_installed": "SVA", + "user": { + "name": "user", + "sid": "S-11-22-33" + }, + 
"company_name": "Bitdefender", + "computer_name": "DC-Nebula", + "computer_fqdn": "dc-nebula.nebula.local", + "computer_ip": "10.17.16.10", + "computer_id": "5ed4d2fef23f7325715dbb22", + "attack_type": "remote", + "item_count": "23", + "detected_on": 1591007594, + "attack_source": "10.10.20.120" + } + }, + { + "companyId": "6e6b3a520075db7384dfa276", + "module": "ransomware-mitigation", + "start_time": "2025-12-10T15:40:38.432Z", + "end_time": "2025-12-10T15:40:38.432Z", + "data": { + "product_installed": "SVA", + "user": { + "name": "user", + "sid": "S-11-22-33" + }, + "company_name": "Bitdefender", + "computer_name": "DC-Nebula", + "computer_fqdn": "dc-nebula.nebula.local", + "computer_ip": "10.17.16.10", + "computer_id": "5ed4d2fef23f7325715dbb22", + "attack_type": "remote", + "item_count": "23", + "detected_on": 1591007594, + "attack_source": "10.10.20.120" + } + }, + { + "companyId": "5efb3a520075db7384fff299", + "module": "network-sandboxing", + "start_time": "2025-12-10T15:40:38.432Z", + "end_time": "2025-12-10T15:40:38.432Z", + "data": { + "endpointId": "59a1604e60369e06733f8aba", + "computerName": "FC-WIN7-X64-01", + "computerIp": "192.168.0.1", + "detectionTime": 1505386969, + "threatType": "RANSOMWARE", + "filePaths": [ + "C:\\Users\\Administrator\\Documents\\installer.xml", + "D:\\opt\\bitdefender\\installer2.xml", + "D:\\sources\\console\\CommonConsole\\app\\modules\\policies\\view\\endpoints\\networkSandboxing\\installer3.xml" + ], + "fileSizes": [ + "2614", + "2615", + "2616" + ], + "remediationActions": [ + "1", + "", + "1" + ] + } + }, + { + "companyId": "5efb3a520075db7384fff299", + "module": "network-sandboxing", + "start_time": "2025-12-10T15:40:38.432Z", + "end_time": "2025-12-10T15:40:38.432Z", + "data": { + "endpointId": "59a1604e60369e06733f8aba", + "computerName": "FC-WIN7-X64-01", + "computerIp": "192.168.0.1", + "detectionTime": 1505386969, + "threatType": "RANSOMWARE", + "filePaths": [ + 
"C:\\Users\\Administrator\\Documents\\installer.xml", + "D:\\opt\\bitdefender\\installer2.xml", + "D:\\sources\\console\\CommonConsole\\app\\modules\\policies\\view\\endpoints\\networkSandboxing\\installer3.xml" + ], + "fileSizes": [ + "2614", + "2615", + "2616" + ], + "remediationActions": [ + "1", + "", + "1" + ] + } + }, + { + "companyId": "6e6b3a520075db7384dfa276", + "module": "exchange-malware", + "start_time": "2025-12-10T15:40:38.432Z", + "end_time": "2025-12-10T15:40:38.432Z", + "data": { + "computer_name": "FC- EXCHANGE - 01", + "computer_fqdn": "fc- exchange - 01.fc.dom", + "computer_ip": "192.168.0.1", + "computer_id": "59b7d9bfa849af3a1465b7e4", + "product_installed": "BEST", + "endpointId": "59b7d9bfa849af3a1465b7e3", + "serverName": "FC- EXCHANGE - 01", + "sender": "fc_test01@fc.dom", + "recipients": [ + "fc_test02@fc.dom" + ], + "subject": "Emailing Sending.. WL - cbe100c9f42a20ef9a4b1c20ed1a59f9 - 0", + "detectionTime": "2017- 09 - 13T14: 20:37.000Z", + "malware": [ + { + "malwareName": "Trojan.Generic.KD.874127", + "malwareType": "virus", + "actionTaken": "quarantine", + "infectedObject": "WL- cbe100c9f42a20ef9a4b1c20ed1a59f9 - 0" + } + ] + } + }, + { + "companyId": "6e6b3a520075db7384dfa276", + "module": "exchange-malware", + "start_time": "2025-12-10T15:40:38.432Z", + "end_time": "2025-12-10T15:40:38.432Z", + "data": { + "computer_name": "FC- EXCHANGE - 01", + "computer_fqdn": "fc- exchange - 01.fc.dom", + "computer_ip": "192.168.0.1", + "computer_id": "59b7d9bfa849af3a1465b7e4", + "product_installed": "BEST", + "endpointId": "59b7d9bfa849af3a1465b7e3", + "serverName": "FC- EXCHANGE - 01", + "sender": "fc_test01@fc.dom", + "recipients": [ + "fc_test02@fc.dom" + ], + "subject": "Emailing Sending.. 
WL - cbe100c9f42a20ef9a4b1c20ed1a59f9 - 0", + "detectionTime": "2017- 09 - 13T14: 20:37.000Z", + "malware": [ + { + "malwareName": "Trojan.Generic.KD.874127", + "malwareType": "virus", + "actionTaken": "quarantine", + "infectedObject": "WL- cbe100c9f42a20ef9a4b1c20ed1a59f9 - 0" + } + ] + } + } +] \ No newline at end of file diff --git a/Solutions/AbuseIPDB/Data/Solution_AbuseIPDB.json b/Solutions/AbuseIPDB/Data/Solution_AbuseIPDB.json index 3bd724a43e1..3c7b5ac7caf 100644 --- a/Solutions/AbuseIPDB/Data/Solution_AbuseIPDB.json +++ b/Solutions/AbuseIPDB/Data/Solution_AbuseIPDB.json @@ -6,11 +6,11 @@ "Playbooks": [ "Solutions/AbuseIPDB/Playbooks/AbuseIPDBAPIConnector/azuredeploy.json", "Solutions/AbuseIPDB/Playbooks/AbuseIPDB-EnrichIncidentByIPInfo/azuredeploy.json", - "Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams/azuredeploy.json", + "Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportIPsAfterUserResponseInMSTeams/azuredeploy.json", "Solutions/AbuseIPDB/Playbooks/AbuseIPDB-BlacklistIpToThreatIntelligence/azuredeploy.json" ], "BasePath": "C:\\GitHub\\Azure-Sentinel", - "Version": "3.0.1", + "Version": "3.0.2", "Metadata": "SolutionMetadata.json", "TemplateSpec": true, "Is1PConnector": false diff --git a/Solutions/AbuseIPDB/Data/system_generated_metadata.json b/Solutions/AbuseIPDB/Data/system_generated_metadata.json index 888683d18e0..6354195a5db 100644 --- a/Solutions/AbuseIPDB/Data/system_generated_metadata.json +++ b/Solutions/AbuseIPDB/Data/system_generated_metadata.json @@ -30,6 +30,6 @@ "Playbooks/AbuseIPDBAPIConnector/azuredeploy.json", "Playbooks/AbuseIPDB-BlacklistIpToThreatIntelligence/azuredeploy.json", "Playbooks/AbuseIPDB-EnrichIncidentByIPInfo/azuredeploy.json", - "Playbooks/AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams/azuredeploy.json" + "Playbooks/AbuseIPDB-ReportIPsAfterUserResponseInMSTeams/azuredeploy.json" ] } diff --git a/Solutions/AbuseIPDB/Package/3.0.2.zip 
b/Solutions/AbuseIPDB/Package/3.0.2.zip new file mode 100644 index 00000000000..b4f02869635 Binary files /dev/null and b/Solutions/AbuseIPDB/Package/3.0.2.zip differ diff --git a/Solutions/AbuseIPDB/Package/mainTemplate.json b/Solutions/AbuseIPDB/Package/mainTemplate.json index fac8fafb964..9c91f3abe04 100644 --- a/Solutions/AbuseIPDB/Package/mainTemplate.json +++ b/Solutions/AbuseIPDB/Package/mainTemplate.json @@ -33,7 +33,7 @@ "email": "support@microsoft.com", "_email": "[variables('email')]", "_solutionName": "AbuseIPDB", - "_solutionVersion": "3.0.1", + "_solutionVersion": "3.0.2", "solutionId": "azuresentinel.azure-sentinel-solution-abuseipdb", "_solutionId": "[variables('solutionId')]", "AbuseIPDBAPIConnector": "AbuseIPDBAPIConnector", @@ -54,10 +54,10 @@ "playbookId2": "[resourceId('Microsoft.Logic/workflows', variables('playbookContentId2'))]", "playbookTemplateSpecName2": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-pl-',uniquestring(variables('_playbookContentId2'))))]", "_playbookcontentProductId2": "[concat(take(variables('_solutionId'),50),'-','pl','-', uniqueString(concat(variables('_solutionId'),'-','Playbook','-',variables('_playbookContentId2'),'-', variables('playbookVersion2'))))]", - "AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams": "AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams", - "_AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams": "[variables('AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams')]", + "AbuseIPDB-ReportIPsAfterUserResponseInMSTeams": "AbuseIPDB-ReportIPsAfterUserResponseInMSTeams", + "_AbuseIPDB-ReportIPsAfterUserResponseInMSTeams": "[variables('AbuseIPDB-ReportIPsAfterUserResponseInMSTeams')]", "playbookVersion3": "1.0", - "playbookContentId3": "AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams", + "playbookContentId3": "AbuseIPDB-ReportIPsAfterUserResponseInMSTeams", "_playbookContentId3": 
"[variables('playbookContentId3')]", "playbookId3": "[resourceId('Microsoft.Logic/workflows', variables('playbookContentId3'))]", "playbookTemplateSpecName3": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-pl-',uniquestring(variables('_playbookContentId3'))))]", @@ -82,7 +82,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "AbuseIPDBAPIConnector Playbook with template version 3.0.1", + "description": "AbuseIPDBAPIConnector Playbook with template version 3.0.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion1')]", @@ -640,7 +640,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "AbuseIPDB-EnrichIncidentByIPInfo Playbook with template version 3.0.1", + "description": "AbuseIPDB-EnrichIncidentByIPInfo Playbook with template version 3.0.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion2')]", @@ -735,27 +735,27 @@ "inputs": { "name": "Result List", "value": { - "abuseConfidenceScore": "@body('AuseIPDB_CHECK_Endpoint')?['data']?['abuseConfidenceScore']", - "hostnames": "@body('AuseIPDB_CHECK_Endpoint')?['data']?['hostnames']", - "ipAddress": "@body('AuseIPDB_CHECK_Endpoint')?['data']?['ipAddress']", - "ipVersion": "@body('AuseIPDB_CHECK_Endpoint')?['data']?['ipVersion']", - "isPublic": "@body('AuseIPDB_CHECK_Endpoint')?['data']?['isPublic']", - "isWhitelisted": "@body('AuseIPDB_CHECK_Endpoint')?['data']?['isWhitelisted']", - "isp": 
"@body('AuseIPDB_CHECK_Endpoint')?['data']?['isp']", - "lastReportedAt": "@body('AuseIPDB_CHECK_Endpoint')?['data']?['lastReportedAt']", - "numDistinctUsers": "@body('AuseIPDB_CHECK_Endpoint')?['data']?['numDistinctUsers']", - "totalReports": "@body('AuseIPDB_CHECK_Endpoint')?['data']?['totalReports']", - "usageType": "@body('AuseIPDB_CHECK_Endpoint')?['data']?['usageType']" + "abuseConfidenceScore": "@body('AbuseIPDB_CHECK_Endpoint')?['data']?['abuseConfidenceScore']", + "hostnames": "@body('AbuseIPDB_CHECK_Endpoint')?['data']?['hostnames']", + "ipAddress": "@body('AbuseIPDB_CHECK_Endpoint')?['data']?['ipAddress']", + "ipVersion": "@body('AbuseIPDB_CHECK_Endpoint')?['data']?['ipVersion']", + "isPublic": "@body('AbuseIPDB_CHECK_Endpoint')?['data']?['isPublic']", + "isWhitelisted": "@body('AbuseIPDB_CHECK_Endpoint')?['data']?['isWhitelisted']", + "isp": "@body('AbuseIPDB_CHECK_Endpoint')?['data']?['isp']", + "lastReportedAt": "@body('AbuseIPDB_CHECK_Endpoint')?['data']?['lastReportedAt']", + "numDistinctUsers": "@body('AbuseIPDB_CHECK_Endpoint')?['data']?['numDistinctUsers']", + "totalReports": "@body('AbuseIPDB_CHECK_Endpoint')?['data']?['totalReports']", + "usageType": "@body('AbuseIPDB_CHECK_Endpoint')?['data']?['usageType']" } }, "runAfter": { - "AuseIPDB_CHECK_Endpoint": [ + "AbuseIPDB_CHECK_Endpoint": [ "Succeeded" ] }, "type": "AppendToArrayVariable" }, - "AuseIPDB_CHECK_Endpoint": { + "AbuseIPDB_CHECK_Endpoint": { "inputs": { "host": { "connection": { @@ -814,7 +814,7 @@ "type": "ApiConnection" }, "Get_Logo": { - "inputs": " ", + "inputs": " ", "runAfter": { "Entities_-_Get_IPs": [ "Succeeded" @@ -974,14 +974,14 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams Playbook with template version 3.0.1", + "description": 
"AbuseIPDB-ReportIPsAfterUserResponseInMSTeams Playbook with template version 3.0.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion3')]", "parameters": { "PlaybookName": { - "defaultValue": "AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams", - "type": "String" + "defaultValue": "AbuseIPDB-ReportIPsAfterUserResponseInMSTeams", + "type": "string" }, "customApis_AbuseIPDBAPI_name": { "defaultValue": "AbuseIPDBAPI", @@ -992,16 +992,16 @@ }, "MSTeamsGroupId": { "defaultValue": "", - "type": "String", + "type": "string", "metadata": { - "description": "Value of the MSTeamsGroupId parameter in the AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams playbook. Id of the Teams Group where the adaptive card will be posted." + "description": "Value of the MSTeamsGroupId parameter in the AbuseIPDB-ReportIPsAfterUserResponseInMSTeams playbook. Id of the Teams Group where the adaptive card will be posted." } }, "MSTeamsChannelId": { "defaultValue": "", - "type": "String", + "type": "string", "metadata": { - "description": "Value of the MSTeamsChannelId parameter in the AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams playbook. Id of the Teams Channel where the adaptive card will be posted." + "description": "Value of the MSTeamsChannelId parameter in the AbuseIPDB-ReportIPsAfterUserResponseInMSTeams playbook. Id of the Teams Channel where the adaptive card will be posted." 
} } }, @@ -1340,7 +1340,7 @@ "type": "Foreach" }, "Get_Logo": { - "inputs": " ", + "inputs": " ", "runAfter": { "Entities_-_Get_IPs": [ "Succeeded" @@ -1469,7 +1469,7 @@ } ], "metadata": { - "title": " AbuseIPDB Report a IPs To AbuselPDB After Checking By User In MSTeams", + "title": "AbuseIPDB Report IPs To AbuseIPDB After User Response In MSTeams", "description": "When a new sentinel incident is created, this playbook gets triggered and performs the following actions:\n 1. Sends an adaptive card to the Teams channel where the analyst can choose an action to be taken.", "prerequisites": [ "1. Prior to the deployment of this playbook, AbuseIPDB Connector needs to be deployed under the same subscription.", @@ -1495,7 +1495,7 @@ "releaseNotes": [ { "version": "1.0.0", - "title": "AbuseIPDB Report a IPs To AbuselPDB After Checking By User In MSTeams", + "title": "AbuseIPDB Report IPs To AbuseIPDB After User Response In MSTeams", "notes": [ "Initial version" ] @@ -1510,7 +1510,7 @@ "contentSchemaVersion": "3.0.0", "contentId": "[variables('_playbookContentId3')]", "contentKind": "Playbook", - "displayName": "AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams", + "displayName": "AbuseIPDB-ReportIPsAfterUserResponseInMSTeams", "contentProductId": "[variables('_playbookcontentProductId3')]", "id": "[variables('_playbookcontentProductId3')]", "version": "[variables('playbookVersion3')]" @@ -1525,7 +1525,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "AbuseIPDB-BlacklistIpToThreatIntelligence Playbook with template version 3.0.1", + "description": "AbuseIPDB-BlacklistIpToThreatIntelligence Playbook with template version 3.0.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion4')]", @@ 
-1830,8 +1830,7 @@ "prerequisites": [ "1. AbuseIPDBAPI Custom Connector has to be deployed prior to the deployment of this playbook under the same subscription.", "2. To use the Microsoft Graph Security connector actions, Microsoft Entra ID tenant administrator consent needs to be provided. The Microsoft Graph Security connector application ID and name for Microsoft Entra ID follows for Microsoft Entra ID administrator consent:\n- Application Name - MicrosoftGraphSecurityConnector\n- Application ID - c4829704-0edc-4c3d-a347-7c4a67586f3c", - - "3. To view the Threat Indicators submitted by Microsoft Graph Security connector, 'Threat Intelligence Platforms' connector from 'Threat Intelligence' Solution need to be install." + "3. To view the Threat Indicators submitted by Microsoft Graph Security connector, 'Threat Intelligence Platforms' connector from 'Threat Intelligence' Solution needs to be installed." ], "preDeployment": [ "1. AbuseIPDB Custom Connector has to be deployed prior to the deployment of this playbook under the same subscription." 
@@ -1881,7 +1880,7 @@ "apiVersion": "2023-04-01-preview", "location": "[parameters('workspace-location')]", "properties": { - "version": "3.0.1", + "version": "3.0.2", "kind": "Solution", "contentSchemaVersion": "3.0.0", "displayName": "AbuseIPDB", @@ -1923,7 +1922,7 @@ }, { "kind": "Playbook", - "contentId": "[variables('_AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams')]", + "contentId": "[variables('_AbuseIPDB-ReportIPsAfterUserResponseInMSTeams')]", "version": "[variables('playbookVersion3')]" }, { diff --git a/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-EnrichIncidentByIPInfo/azuredeploy.json b/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-EnrichIncidentByIPInfo/azuredeploy.json index fd7e06170d8..7a3a331fad8 100644 --- a/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-EnrichIncidentByIPInfo/azuredeploy.json +++ b/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-EnrichIncidentByIPInfo/azuredeploy.json @@ -130,27 +130,27 @@ "inputs":{ "name":"Result List", "value":{ - "abuseConfidenceScore":"@body('AuseIPDB_CHECK_Endpoint')?['data']?['abuseConfidenceScore']", - "hostnames":"@body('AuseIPDB_CHECK_Endpoint')?['data']?['hostnames']", - "ipAddress":"@body('AuseIPDB_CHECK_Endpoint')?['data']?['ipAddress']", - "ipVersion":"@body('AuseIPDB_CHECK_Endpoint')?['data']?['ipVersion']", - "isPublic":"@body('AuseIPDB_CHECK_Endpoint')?['data']?['isPublic']", - "isWhitelisted":"@body('AuseIPDB_CHECK_Endpoint')?['data']?['isWhitelisted']", - "isp":"@body('AuseIPDB_CHECK_Endpoint')?['data']?['isp']", - "lastReportedAt":"@body('AuseIPDB_CHECK_Endpoint')?['data']?['lastReportedAt']", - "numDistinctUsers":"@body('AuseIPDB_CHECK_Endpoint')?['data']?['numDistinctUsers']", - "totalReports":"@body('AuseIPDB_CHECK_Endpoint')?['data']?['totalReports']", - "usageType":"@body('AuseIPDB_CHECK_Endpoint')?['data']?['usageType']" + "abuseConfidenceScore":"@body('AbuseIPDB_CHECK_Endpoint')?['data']?['abuseConfidenceScore']", + "hostnames":"@body('AbuseIPDB_CHECK_Endpoint')?['data']?['hostnames']", + 
"ipAddress":"@body('AbuseIPDB_CHECK_Endpoint')?['data']?['ipAddress']", + "ipVersion":"@body('AbuseIPDB_CHECK_Endpoint')?['data']?['ipVersion']", + "isPublic":"@body('AbuseIPDB_CHECK_Endpoint')?['data']?['isPublic']", + "isWhitelisted":"@body('AbuseIPDB_CHECK_Endpoint')?['data']?['isWhitelisted']", + "isp":"@body('AbuseIPDB_CHECK_Endpoint')?['data']?['isp']", + "lastReportedAt":"@body('AbuseIPDB_CHECK_Endpoint')?['data']?['lastReportedAt']", + "numDistinctUsers":"@body('AbuseIPDB_CHECK_Endpoint')?['data']?['numDistinctUsers']", + "totalReports":"@body('AbuseIPDB_CHECK_Endpoint')?['data']?['totalReports']", + "usageType":"@body('AbuseIPDB_CHECK_Endpoint')?['data']?['usageType']" } }, "runAfter":{ - "AuseIPDB_CHECK_Endpoint":[ + "AbuseIPDB_CHECK_Endpoint":[ "Succeeded" ] }, "type":"AppendToArrayVariable" }, - "AuseIPDB_CHECK_Endpoint":{ + "AbuseIPDB_CHECK_Endpoint":{ "inputs":{ "host":{ "connection":{ @@ -215,7 +215,7 @@ "type":"ApiConnection" }, "Get_Logo":{ - "inputs":" ", + "inputs":" ", "runAfter":{ "Entities_-_Get_IPs":[ "Succeeded" @@ -288,4 +288,4 @@ } } ] -} \ No newline at end of file +} diff --git a/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams/azuredeploy.json b/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportIPsAfterUserResponseInMSTeams/azuredeploy.json similarity index 96% rename from Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams/azuredeploy.json rename to Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportIPsAfterUserResponseInMSTeams/azuredeploy.json index 30b806bea6a..16d984f1dee 100644 --- a/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams/azuredeploy.json +++ b/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportIPsAfterUserResponseInMSTeams/azuredeploy.json @@ -2,7 +2,7 @@ "$schema":"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion":"1.0.0.0", "metadata": { - "title": " AbuseIPDB 
Report a IPs To AbuselPDB After Checking By User In MSTeams", + "title": "AbuseIPDB Report IPs To AbuseIPDB After User Response In MSTeams", "description": "When a new sentinel incident is created, this playbook gets triggered and performs the following actions:\n 1. Sends an adaptive card to the Teams channel where the analyst can choose an action to be taken.", "prerequisites": [ @@ -37,15 +37,15 @@ "releaseNotes": [ { "version": "1.0.0", - "title": "AbuseIPDB Report a IPs To AbuselPDB After Checking By User In MSTeams", + "title": "AbuseIPDB Report IPs To AbuseIPDB After User Response In MSTeams", "notes": [ "Initial version" ] } ] }, "parameters":{ "PlaybookName":{ - "defaultValue":"AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams", - "type":"String" + "defaultValue":"AbuseIPDB-ReportIPsAfterUserResponseInMSTeams", + "type":"string" }, "customApis_AbuseIPDBAPI_name": { "defaultValue": "AbuseIPDBAPI", @@ -56,16 +56,16 @@ }, "MSTeamsGroupId":{ "defaultValue":"", - "type":"String", + "type":"string", "metadata": { - "description": "Value of the MSTeamsGroupId parameter in the AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams playbook. Id of the Teams Group where the adaptive card will be posted." + "description": "Value of the MSTeamsGroupId parameter in the AbuseIPDB-ReportIPsAfterUserResponseInMSTeams playbook. Id of the Teams Group where the adaptive card will be posted." } }, "MSTeamsChannelId":{ "defaultValue":"", - "type":"String", + "type":"string", "metadata": { - "description": "Value of the MSTeamsChannelId parameter in the AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams playbook. Id of the Teams Channel where the adaptive card will be posted." + "description": "Value of the MSTeamsChannelId parameter in the AbuseIPDB-ReportIPsAfterUserResponseInMSTeams playbook. Id of the Teams Channel where the adaptive card will be posted." 
} } @@ -414,7 +414,7 @@ "type":"Foreach" }, "Get_Logo":{ - "inputs":" ", + "inputs":" ", "runAfter":{ "Entities_-_Get_IPs":[ "Succeeded" @@ -510,4 +510,4 @@ } } ] -} \ No newline at end of file +} diff --git a/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams/playbook_screenshot.png b/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportIPsAfterUserResponseInMSTeams/playbook_screenshot.png similarity index 100% rename from Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams/playbook_screenshot.png rename to Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportIPsAfterUserResponseInMSTeams/playbook_screenshot.png diff --git a/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams/readme.md b/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportIPsAfterUserResponseInMSTeams/readme.md similarity index 81% rename from Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams/readme.md rename to Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportIPsAfterUserResponseInMSTeams/readme.md index 1e15b947a70..9ab773867a6 100644 --- a/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams/readme.md +++ b/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportIPsAfterUserResponseInMSTeams/readme.md @@ -1,4 +1,4 @@ -# AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams +# AbuseIPDB-ReportIPsAfterUserResponseInMSTeams ## Summary @@ -27,7 +27,7 @@ When a new sentinel incident is created, this playbook gets triggered and perfor * Teams Group Id: Id of the Teams Group where the adaptive card will be posted * Teams Channel Id: Id of the Teams Channel where the adaptive card will be posted -[![Deploy to 
Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FAbuseIPDB%2FPlaybooks%2FAbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams%2Fazuredeploy.json) [![Deploy to Azure](https://aka.ms/deploytoazuregovbutton)](https://portal.azure.us/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FAbuseIPDB%2FPlaybooks%2FAbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams%2Fazuredeploy.json) +[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FAbuseIPDB%2FPlaybooks%2FAbuseIPDB-ReportIPsAfterUserResponseInMSTeams%2Fazuredeploy.json) [![Deploy to Azure](https://aka.ms/deploytoazuregovbutton)](https://portal.azure.us/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FAbuseIPDB%2FPlaybooks%2FAbuseIPDB-ReportIPsAfterUserResponseInMSTeams%2Fazuredeploy.json) ### Post-Deployment instructions diff --git a/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams/teams_screenshot.png b/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportIPsAfterUserResponseInMSTeams/teams_screenshot.png similarity index 100% rename from Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams/teams_screenshot.png rename to Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportIPsAfterUserResponseInMSTeams/teams_screenshot.png diff --git a/Solutions/AbuseIPDB/Playbooks/azuredeploy.json b/Solutions/AbuseIPDB/Playbooks/azuredeploy.json index 15bb96fc88e..43bbc31465a 100644 --- a/Solutions/AbuseIPDB/Playbooks/azuredeploy.json +++ b/Solutions/AbuseIPDB/Playbooks/azuredeploy.json @@ -16,8 +16,8 @@ "description": "Name of the 
Playbook" } }, - "AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams_Playbook_Name": { - "defaultValue": "AbuseIPDB-ReportaIPsToAbuseIPDBAfterCheckingByUserInMSTeams", + "AbuseIPDB-ReportIPsAfterUserResponseInMSTeams_Playbook_Name": { + "defaultValue": "AbuseIPDB-ReportIPsAfterUserResponseInMSTeams", "type": "string", "metadata": { "description": "Name of the Playbook" @@ -27,14 +27,14 @@ "defaultValue":"", "type":"String", "metadata": { - "description": "Value of the MSTeamsGroupId parameter in the AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams playbook. Id of the Teams Group where the adaptive card will be posted." + "description": "Value of the MSTeamsGroupId parameter in the AbuseIPDB-ReportIPsAfterUserResponseInMSTeams playbook. Id of the Teams Group where the adaptive card will be posted." } }, "MSTeamsChannelId":{ "defaultValue":"", "type":"String", "metadata": { - "description": "Value of the MSTeamsChannelId parameter in the AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams playbook. Id of the Teams Channel where the adaptive card will be posted." + "description": "Value of the MSTeamsChannelId parameter in the AbuseIPDB-ReportIPsAfterUserResponseInMSTeams playbook. Id of the Teams Channel where the adaptive card will be posted." 
} } }, @@ -100,11 +100,11 @@ "properties": { "mode": "Incremental", "templateLink": { - "uri": "https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams/azuredeploy.json" + "uri": "https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/AbuseIPDB/Playbooks/AbuseIPDB-ReportIPsAfterUserResponseInMSTeams/azuredeploy.json" }, "parameters": { "PlaybookName": { - "Value": "[parameters('AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams_Playbook_Name')]" + "Value": "[parameters('AbuseIPDB-ReportIPsAfterUserResponseInMSTeams_Playbook_Name')]" }, "MSTeamsGroupId": { "Value": "[parameters('MSTeamsGroupId')]" diff --git a/Solutions/AbuseIPDB/Playbooks/readme.md b/Solutions/AbuseIPDB/Playbooks/readme.md index 6eb03fb2f08..441eada8930 100644 --- a/Solutions/AbuseIPDB/Playbooks/readme.md +++ b/Solutions/AbuseIPDB/Playbooks/readme.md @@ -30,7 +30,7 @@ This package includes: * These three playbook templates leverage AbuseIPDB custom connector: * [Response – blacklist IP`s to tiIndicators](./Playbooks/AbuseIPDB-BlacklistIpToThreatIntelligence) - used to stream IOCs via Microsoft Graph Security tiIndicators API from the AbuseIPDB. * [Response - enrich incedent by IP info](./Playbooks/AbuseIPDB-EnrichIncidentByIPInfo) - get information about IP from AbuseIPDB and add to the incident comments. - * [Response - Report IP to AbuseIPDB from the incident](./Playbooks/AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams) - Report IP to AbuseIPDB from incident after user approval in Teams. + * [Response - Report IP to AbuseIPDB from the incident](./Playbooks/AbuseIPDB-ReportIPsAfterUserResponseInMSTeams) - Report IP to AbuseIPDB from incident after user approval in Teams. You can choose to deploy the whole package: connectors + all three playbook templates, or each one seperately from its specific folder. 
@@ -62,9 +62,9 @@ For obtain API Key [follow the instructions](https://www.abuseipdb.com/api.html) |**For Playbooks**| |**AbuseIPDB-BlacklistIpToThreatIntelligence Playbook Name** | Enter the playbook name here (e.g. AbuseIPDB-BlacklistIpToThreatIntelligence)| |**AbuseIPDB-EnrichIncidentByIPInfo Playbook Name** | Enter the playbook name here (e.g. AbuseIPDB-EnrichIncidentByIPInfo)| -|**AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams Playbook Name** | Enter the playbook name here (e.g. AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams)| -|**MSTeamsGroupId** | Value of TeamsGroupId parameter in AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams playbook. Id of the Teams Group where the adaptive card will be posted.| -|**MSTeamsChannelId** | Value of TeamsChannelId parameter in AbuseIPDB-ReportaIPsToAbuselPDBAfterCheckingByUserInMSTeams playbook. Id of the Teams Channel where the adaptive card will be posted.| +|**AbuseIPDB-ReportIPsAfterUserResponseInMSTeams Playbook Name** | Enter the playbook name here (e.g. AbuseIPDB-ReportIPsAfterUserResponseInMSTeams)| +|**MSTeamsGroupId** | Value of TeamsGroupId parameter in AbuseIPDB-ReportIPsAfterUserResponseInMSTeams playbook. Id of the Teams Group where the adaptive card will be posted.| +|**MSTeamsChannelId** | Value of TeamsChannelId parameter in AbuseIPDB-ReportIPsAfterUserResponseInMSTeams playbook. Id of the Teams Channel where the adaptive card will be posted.|
diff --git a/Solutions/AbuseIPDB/ReleaseNotes.md b/Solutions/AbuseIPDB/ReleaseNotes.md index 70af3c1399d..0fda25f1654 100644 --- a/Solutions/AbuseIPDB/ReleaseNotes.md +++ b/Solutions/AbuseIPDB/ReleaseNotes.md @@ -1,5 +1,6 @@ | **Version** | **Date Modified (DD-MM-YYYY)** | **Change History** | |-------------|--------------------------------|----------------------------------------------------------------------------------| +| 3.0.2 | 09-12-2025 | Fixed typos and updated image source in AbuseIPDB **Playbook** Solutions | | 3.0.1 | 29-03-2024 | Updated **playbook** description and corrected sentense formatting | | 3.0.0 | 31-07-2023 | Updated prerequisites for AbuseIPDB-BlacklistIpToThreatIntelligence **playbook** | | | | Modified text as there is rebranding from Azure Active Directory to Microsoft Entra ID. | \ No newline at end of file diff --git a/Solutions/ContentHubSolutionsCatalog.md b/Solutions/ContentHubSolutionsCatalog.md index 7c0c07849e4..56d16a3a883 100644 --- a/Solutions/ContentHubSolutionsCatalog.md +++ b/Solutions/ContentHubSolutionsCatalog.md @@ -76,7 +76,7 @@ This file is a catalog of all solutions and standalone content templates that ex |AbuseIPDB|The AbuseIPDB solution for Microsoft Sentinel allows you to check the reputation of IP addresses in log data and perform automated actions like enriching a Microsoft Sentinel incident by IP reputation information, add blacklisted IP addresses to ThreatIntelligenceIndicator table and reporting IPs to Abuse IPDB based on a user response in Teams.Custom Azure Logic Apps Connectors: 1, Playbooks: 3| |LogicAppsCustomConnector| |Solution| | | |AbuseIPDB Blacklist Ip To Threat Intelligence|Playbook|By every day reccurence, this playbook gets triggered and performs the following actions: 1. 
Gets [list](https://docs.abuseipdb.com/#blacklist-endpoint) of the most reported IP addresses form the Blacklist Endpoint.|Solution| | | |AbuseIPDB Enrich Incident By IP Info|Playbook|Once a new sentinal incident is created, this playbook gets triggered and performs the following actions: 1. [Gets Information](https://docs.abuseipdb.com/#check-endpoint) from AbuseIPDB by IP`s, provided in the alert custom entities. 2. Enriches the incident with the obtained info.|Solution| -| | |AbuseIPDB Report a IPs To AbuselPDB After Checking By User In MSTeams|Playbook|When a new sentinel incident is created, this playbook gets triggered and performs the following actions: 1. Sends an adaptive card to the Teams channel where the analyst can choose an action to be taken.|Solution| +| | |AbuseIPDB Report IPs To AbuseIPDB After User Response In MSTeams|Playbook|When a new sentinel incident is created, this playbook gets triggered and performs the following actions: 1. Sends an adaptive card to the Teams channel where the analyst can choose an action to be taken.|Solution| |Akamai Security|The Akamai Security Solution for Microsoft Sentinel enables ingestion of Akamai Security Solutions events using the Common Event Format (CEF) into Microsoft Sentinel for Security Monitoring.Underlying Microsoft Technologies used:This solution takes a dependency on the following technologies, and some of these dependencies either may be in Preview state or might result in additional ingestion or operational costs:a. Common Event Format (CEF) formatted logs in Microsoft SentinelData Connectors: 1, Parsers: 1"| |Akamai Security Events|DataConnector|Akamai Solution for Sentinel provides the capability to ingest [Akamai Security Events](https://www.akamai.com/us/en/products/security/) into Microsoft Sentinel. 
Refer to [Akamai SIEM Integration documentation](https://developer.akamai.com/tools/integrations/siem) for more information.|Solution| | | |AkamaiSIEMEvent|Parser| |Solution| |Alibaba Cloud|The Alibaba Cloud solution provides the capability to retrieve logs from cloud applications using the Cloud API and store events into Microsoft Sentinel through the REST API.Underlying Microsoft Technologies used:This solution takes a dependency on the following technologies, and some of these dependencies either may be in Preview state or might result in additional ingestion or operational costs:a. Azure Monitor HTTP Data Collector APIb. Azure FunctionsData Connectors: 1, Parsers: 1|AliCloud (using Azure Functions)|DataConnector|The [AliCloud](https://www.alibabacloud.com/product/log-service) data connector provides the capability to retrieve logs from cloud applications using the Cloud API and store events into Microsoft Sentinel through the [REST API](https://aliyun-log-python-sdk.readthedocs.io/api.html). 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.|Solution| diff --git a/Solutions/Corelight/Data/Solution_Corelight.json b/Solutions/Corelight/Data/Solution_Corelight.json index abdf3f402ff..db4278a9a93 100644 --- a/Solutions/Corelight/Data/Solution_Corelight.json +++ b/Solutions/Corelight/Data/Solution_Corelight.json @@ -148,7 +148,13 @@ "Parsers/corelight_corelight_metrics_zeek_doctor.yaml", "Parsers/corelight_suri_aggregations.yaml", "Parsers/corelight_anomaly.yaml", - "Parsers/corelight_first_seen.yaml" + "Parsers/corelight_first_seen.yaml", + "Parsers/corelight_conn_agg.yaml", + "Parsers/corelight_dns_agg.yaml", + "Parsers/corelight_files_agg.yaml", + "Parsers/corelight_http_agg.yaml", + "Parsers/corelight_ssl_agg.yaml", + "Parsers/corelight_weird_agg.yaml" ], "Hunting Queries": [ "Hunting Queries/CorelightAbnormalEmailSubject.yaml", @@ -163,7 +169,7 @@ "Hunting Queries/CorelightRepetitiveDnsFailures.yaml" ], "BasePath": "C:/Github/Azure-Sentinel/Solutions/Corelight", - "Version": "3.2.1", + "Version": "3.2.2", "Metadata": "SolutionMetadata.json", "TemplateSpec": true, "Is1Pconnector": false diff --git a/Solutions/Corelight/Package/3.2.2.zip b/Solutions/Corelight/Package/3.2.2.zip new file mode 100644 index 00000000000..4cc58fd2c8d Binary files /dev/null and b/Solutions/Corelight/Package/3.2.2.zip differ diff --git a/Solutions/Corelight/Package/createUiDefinition.json b/Solutions/Corelight/Package/createUiDefinition.json index ea0f6564b5b..80e6d262801 100644 --- a/Solutions/Corelight/Package/createUiDefinition.json +++ b/Solutions/Corelight/Package/createUiDefinition.json @@ -6,7 +6,7 @@ "config": { "isWizard": false, "basics": { - "description": "\n\n**Note:** Please refer to the following before installing the solution: \n\n• Review the solution [Release 
Notes](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/Corelight/ReleaseNotes.md)\n\n • There may be [known issues](https://aka.ms/sentinelsolutionsknownissues) pertaining to this Solution, please refer to them before installing.\n\nThe [Corelight](https://corelight.com/) solution provides the capability to ingest events from [Zeek](https://zeek.org/) and [Suricata](https://suricata.io/) via Corelight Sensors into Microsoft Sentinel.\n\n**Underlying Microsoft Technologies used:**\n\nThis solution takes a dependency on the following technologies, and some of these dependencies either may be in [Preview](https://azure.microsoft.com/support/legal/preview-supplemental-terms/) state or might result in additional ingestion or operational costs:\n\na. [Agent based logs collection from Windows and Linux machines](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-custom-logs)\n\n**Data Connectors:** 1, **Parsers:** 116, **Workbooks:** 5, **Analytic Rules:** 10, **Hunting Queries:** 10, **Watchlists:** 4\n\n[Learn more about Microsoft Sentinel](https://aka.ms/azuresentinel) | [Learn more about Solutions](https://aka.ms/azuresentinelsolutionsdoc)", + "description": "\n\n**Note:** Please refer to the following before installing the solution: \n\n• Review the solution [Release Notes](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/Corelight/ReleaseNotes.md)\n\n • There may be [known issues](https://aka.ms/sentinelsolutionsknownissues) pertaining to this Solution, please refer to them before installing.\n\nThe [Corelight](https://corelight.com/) solution provides the capability to ingest events from [Zeek](https://zeek.org/) and [Suricata](https://suricata.io/) via Corelight Sensors into Microsoft Sentinel.\n\n**Underlying Microsoft Technologies used:**\n\nThis solution takes a dependency on the following technologies, and some of these dependencies either may be in 
[Preview](https://azure.microsoft.com/support/legal/preview-supplemental-terms/) state or might result in additional ingestion or operational costs:\n\na. [Agent based logs collection from Windows and Linux machines](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-custom-logs)\n\n**Data Connectors:** 1, **Parsers:** 122, **Workbooks:** 5, **Analytic Rules:** 10, **Hunting Queries:** 10, **Watchlists:** 4\n\n[Learn more about Microsoft Sentinel](https://aka.ms/azuresentinel) | [Learn more about Solutions](https://aka.ms/azuresentinelsolutionsdoc)", "subscription": { "resourceProviders": [ "Microsoft.OperationsManagement/solutions", diff --git a/Solutions/Corelight/Package/mainTemplate.json b/Solutions/Corelight/Package/mainTemplate.json index 9133cc32041..4a014ab2af6 100644 --- a/Solutions/Corelight/Package/mainTemplate.json +++ b/Solutions/Corelight/Package/mainTemplate.json @@ -105,7 +105,7 @@ "email": "info@corelight.com", "_email": "[variables('email')]", "_solutionName": "Corelight", - "_solutionVersion": "3.2.1", + "_solutionVersion": "3.2.2", "solutionId": "corelightinc1584998267292.corelight-for-azure-sentinel", "_solutionId": "[variables('solutionId')]", "workbookVersion1": "1.0.0", @@ -1038,6 +1038,48 @@ "parserVersion116": "1.0.0", "parserContentId116": "corelight_first_seen-Parser" }, + "parserObject117": { + "_parserName117": "[concat(parameters('workspace'),'/','corelight_conn_agg')]", + "_parserId117": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_conn_agg')]", + "parserTemplateSpecName117": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-pr-',uniquestring('corelight_conn_agg-Parser')))]", + "parserVersion117": "1.1.0", + "parserContentId117": "corelight_conn_agg-Parser" + }, + "parserObject118": { + "_parserName118": "[concat(parameters('workspace'),'/','corelight_dns_agg')]", + "_parserId118": 
"[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_dns_agg')]", + "parserTemplateSpecName118": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-pr-',uniquestring('corelight_dns_agg-Parser')))]", + "parserVersion118": "1.1.0", + "parserContentId118": "corelight_dns_agg-Parser" + }, + "parserObject119": { + "_parserName119": "[concat(parameters('workspace'),'/','corelight_files_agg')]", + "_parserId119": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_files_agg')]", + "parserTemplateSpecName119": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-pr-',uniquestring('corelight_files_agg-Parser')))]", + "parserVersion119": "1.1.0", + "parserContentId119": "corelight_files_agg-Parser" + }, + "parserObject120": { + "_parserName120": "[concat(parameters('workspace'),'/','corelight_http_agg')]", + "_parserId120": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_http_agg')]", + "parserTemplateSpecName120": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-pr-',uniquestring('corelight_http_agg-Parser')))]", + "parserVersion120": "1.1.0", + "parserContentId120": "corelight_http_agg-Parser" + }, + "parserObject121": { + "_parserName121": "[concat(parameters('workspace'),'/','corelight_ssl_agg')]", + "_parserId121": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_ssl_agg')]", + "parserTemplateSpecName121": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-pr-',uniquestring('corelight_ssl_agg-Parser')))]", + "parserVersion121": "1.1.0", + "parserContentId121": "corelight_ssl_agg-Parser" + }, + "parserObject122": { + "_parserName122": 
"[concat(parameters('workspace'),'/','corelight_weird_agg')]", + "_parserId122": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_weird_agg')]", + "parserTemplateSpecName122": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-pr-',uniquestring('corelight_weird_agg-Parser')))]", + "parserVersion122": "1.1.0", + "parserContentId122": "corelight_weird_agg-Parser" + }, "huntingQueryObject1": { "huntingQueryVersion1": "1.0.0", "_huntingQuerycontentId1": "e9441d57-39f4-41fb-aaad-d02e47783d1a", @@ -1100,7 +1142,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Corelight Workbook with template version 3.2.1", + "description": "Corelight Workbook with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('workbookVersion1')]", @@ -1188,7 +1230,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Corelight_Alert_Aggregations Workbook with template version 3.2.1", + "description": "Corelight_Alert_Aggregations Workbook with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('workbookVersion2')]", @@ -1332,7 +1374,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Corelight_Data_Explorer Workbook with template version 3.2.1", + "description": 
"Corelight_Data_Explorer Workbook with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('workbookVersion3')]", @@ -1476,7 +1518,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Corelight_Security_Workflow Workbook with template version 3.2.1", + "description": "Corelight_Security_Workflow Workbook with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('workbookVersion4')]", @@ -1648,7 +1690,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Corelight_Sensor_Overview Workbook with template version 3.2.1", + "description": "Corelight_Sensor_Overview Workbook with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('workbookVersion5')]", @@ -1824,7 +1866,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightC2RepetitiveFailures_AnalyticalRules Analytics Rule with template version 3.2.1", + "description": "CorelightC2RepetitiveFailures_AnalyticalRules Analytics Rule with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject1').analyticRuleVersion1]", @@ -1852,13 +1894,13 @@ "status": 
"Available", "requiredDataConnectors": [ { - "connectorId": "Corelight", "dataTypes": [ "Corelight_v2_dns", "Corelight_v2_dns_red", "corelight_dns", "corelight_dns_red" - ] + ], + "connectorId": "Corelight" } ], "tactics": [ @@ -1869,13 +1911,13 @@ ], "entityMappings": [ { - "entityType": "IP", "fieldMappings": [ { "columnName": "id_orig_h", "identifier": "Address" } - ] + ], + "entityType": "IP" } ] } @@ -1930,7 +1972,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightExternalProxyDetected_AnalyticalRules Analytics Rule with template version 3.2.1", + "description": "CorelightExternalProxyDetected_AnalyticalRules Analytics Rule with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject2').analyticRuleVersion2]", @@ -1958,11 +2000,11 @@ "status": "Available", "requiredDataConnectors": [ { - "connectorId": "Corelight", "dataTypes": [ "Corelight_v2_http", "corelight_http" - ] + ], + "connectorId": "Corelight" } ], "tactics": [ @@ -1974,13 +2016,13 @@ ], "entityMappings": [ { - "entityType": "IP", "fieldMappings": [ { "columnName": "id_orig_h", "identifier": "Address" } - ] + ], + "entityType": "IP" } ] } @@ -2035,7 +2077,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightForcedExternalOutboundSMB_AnalyticalRules Analytics Rule with template version 3.2.1", + "description": "CorelightForcedExternalOutboundSMB_AnalyticalRules Analytics Rule with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", 
"contentVersion": "[variables('analyticRuleObject3').analyticRuleVersion3]", @@ -2063,13 +2105,13 @@ "status": "Available", "requiredDataConnectors": [ { - "connectorId": "Corelight", "dataTypes": [ "Corelight_v2_conn", "Corelight_v2_conn_red", "corelight_conn", "corelight_conn_red" - ] + ], + "connectorId": "Corelight" } ], "tactics": [ @@ -2080,13 +2122,13 @@ ], "entityMappings": [ { - "entityType": "IP", "fieldMappings": [ { "columnName": "id_orig_h", "identifier": "Address" } - ] + ], + "entityType": "IP" } ] } @@ -2141,7 +2183,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightMultipleCompressedFilesTransferredOverHTTP_AnalyticalRules Analytics Rule with template version 3.2.1", + "description": "CorelightMultipleCompressedFilesTransferredOverHTTP_AnalyticalRules Analytics Rule with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject4').analyticRuleVersion4]", @@ -2169,11 +2211,11 @@ "status": "Available", "requiredDataConnectors": [ { - "connectorId": "Corelight", "dataTypes": [ "Corelight_v2_http", "corelight_http" - ] + ], + "connectorId": "Corelight" } ], "tactics": [ @@ -2184,13 +2226,13 @@ ], "entityMappings": [ { - "entityType": "IP", "fieldMappings": [ { "columnName": "id_orig_h", "identifier": "Address" } - ] + ], + "entityType": "IP" } ] } @@ -2245,7 +2287,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightMultipleFilesSentOverHTTPAbnormalRequests_AnalyticalRules Analytics Rule with template version 3.2.1", + "description": 
"CorelightMultipleFilesSentOverHTTPAbnormalRequests_AnalyticalRules Analytics Rule with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject5').analyticRuleVersion5]", @@ -2273,11 +2315,11 @@ "status": "Available", "requiredDataConnectors": [ { - "connectorId": "Corelight", "dataTypes": [ "Corelight_v2_http", "corelight_http" - ] + ], + "connectorId": "Corelight" } ], "tactics": [ @@ -2288,13 +2330,13 @@ ], "entityMappings": [ { - "entityType": "IP", "fieldMappings": [ { "columnName": "id_orig_h", "identifier": "Address" } - ] + ], + "entityType": "IP" } ] } @@ -2349,7 +2391,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightNetworkServiceScanning_AnalyticalRules Analytics Rule with template version 3.2.1", + "description": "CorelightNetworkServiceScanning_AnalyticalRules Analytics Rule with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject6').analyticRuleVersion6]", @@ -2377,13 +2419,13 @@ "status": "Available", "requiredDataConnectors": [ { - "connectorId": "Corelight", "dataTypes": [ "Corelight_v2_conn", "Corelight_v2_conn_red", "corelight_conn", "corelight_conn_red" - ] + ], + "connectorId": "Corelight" } ], "tactics": [ @@ -2394,13 +2436,13 @@ ], "entityMappings": [ { - "entityType": "IP", "fieldMappings": [ { "columnName": "id_orig_h", "identifier": "Address" } - ] + ], + "entityType": "IP" } ] } @@ -2455,7 +2497,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - 
"description": "CorelightPossibleWebshell_AnalyticalRules Analytics Rule with template version 3.2.1", + "description": "CorelightPossibleWebshell_AnalyticalRules Analytics Rule with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject7').analyticRuleVersion7]", @@ -2483,11 +2525,11 @@ "status": "Available", "requiredDataConnectors": [ { - "connectorId": "Corelight", "dataTypes": [ "Corelight_v2_http", "corelight_http" - ] + ], + "connectorId": "Corelight" } ], "tactics": [ @@ -2498,13 +2540,13 @@ ], "entityMappings": [ { - "entityType": "IP", "fieldMappings": [ { "columnName": "id_orig_h", "identifier": "Address" } - ] + ], + "entityType": "IP" } ] } @@ -2559,7 +2601,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightPossibleWebshellRarePOST_AnalyticalRules Analytics Rule with template version 3.2.1", + "description": "CorelightPossibleWebshellRarePOST_AnalyticalRules Analytics Rule with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject8').analyticRuleVersion8]", @@ -2587,11 +2629,11 @@ "status": "Available", "requiredDataConnectors": [ { - "connectorId": "Corelight", "dataTypes": [ "Corelight_v2_http", "corelight_http" - ] + ], + "connectorId": "Corelight" } ], "tactics": [ @@ -2602,13 +2644,13 @@ ], "entityMappings": [ { - "entityType": "IP", "fieldMappings": [ { "columnName": "id_orig_h", "identifier": "Address" } - ] + ], + "entityType": "IP" } ] } @@ -2663,7 +2705,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', 
variables('_solutionId'))]" ], "properties": { - "description": "CorelightSMTPEmailSubjectNonAsciiCharacters_AnalyticalRules Analytics Rule with template version 3.2.1", + "description": "CorelightSMTPEmailSubjectNonAsciiCharacters_AnalyticalRules Analytics Rule with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject9').analyticRuleVersion9]", @@ -2691,11 +2733,11 @@ "status": "Available", "requiredDataConnectors": [ { - "connectorId": "Corelight", "dataTypes": [ "Corelight_v2_smtp", "corelight_smtp" - ] + ], + "connectorId": "Corelight" } ], "tactics": [ @@ -2706,13 +2748,13 @@ ], "entityMappings": [ { - "entityType": "MailMessage", "fieldMappings": [ { "columnName": "_to", "identifier": "Recipient" } - ] + ], + "entityType": "MailMessage" } ] } @@ -2767,7 +2809,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightTypoSquattingOrPunycodePhishingHTTPRequest_AnalyticalRules Analytics Rule with template version 3.2.1", + "description": "CorelightTypoSquattingOrPunycodePhishingHTTPRequest_AnalyticalRules Analytics Rule with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject10').analyticRuleVersion10]", @@ -2795,11 +2837,11 @@ "status": "Available", "requiredDataConnectors": [ { - "connectorId": "Corelight", "dataTypes": [ "Corelight_v2_http", "corelight_http" - ] + ], + "connectorId": "Corelight" } ], "tactics": [ @@ -2810,13 +2852,13 @@ ], "entityMappings": [ { - "entityType": "IP", "fieldMappings": [ { "columnName": "id_orig_h", "identifier": "Address" } - ] + ], + "entityType": "IP" } ] } @@ -2871,7 +2913,7 @@ 
"[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Corelight data connector with template version 3.2.1", + "description": "Corelight data connector with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('dataConnectorVersion1')]", @@ -4062,7 +4104,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Corelight Data Parser with template version 3.2.1", + "description": "Corelight Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject1').parserVersion1]", @@ -4192,7 +4234,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_bacnet Data Parser with template version 3.2.1", + "description": "corelight_bacnet Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject2').parserVersion2]", @@ -4322,7 +4364,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_capture_loss Data Parser with template version 3.2.1", + "description": "corelight_capture_loss Data Parser with template version 3.2.2", "mainTemplate": { 
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject3').parserVersion3]", @@ -4452,7 +4494,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_cip Data Parser with template version 3.2.1", + "description": "corelight_cip Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject4').parserVersion4]", @@ -4582,7 +4624,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_conn Data Parser with template version 3.2.1", + "description": "corelight_conn Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject5').parserVersion5]", @@ -4712,7 +4754,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_conn_long Data Parser with template version 3.2.1", + "description": "corelight_conn_long Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject6').parserVersion6]", @@ -4842,7 +4884,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', 
variables('_solutionId'))]" ], "properties": { - "description": "corelight_conn_red Data Parser with template version 3.2.1", + "description": "corelight_conn_red Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject7').parserVersion7]", @@ -4972,7 +5014,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_corelight_burst Data Parser with template version 3.2.1", + "description": "corelight_corelight_burst Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject8').parserVersion8]", @@ -5102,7 +5144,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_corelight_overall_capture_loss Data Parser with template version 3.2.1", + "description": "corelight_corelight_overall_capture_loss Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject9').parserVersion9]", @@ -5232,7 +5274,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_corelight_profiling Data Parser with template version 3.2.1", + "description": "corelight_corelight_profiling Data Parser with template version 3.2.2", "mainTemplate": { "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject10').parserVersion10]", @@ -5362,7 +5404,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_datared Data Parser with template version 3.2.1", + "description": "corelight_datared Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject11').parserVersion11]", @@ -5492,7 +5534,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_dce_rpc Data Parser with template version 3.2.1", + "description": "corelight_dce_rpc Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject12').parserVersion12]", @@ -5622,7 +5664,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_dga Data Parser with template version 3.2.1", + "description": "corelight_dga Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject13').parserVersion13]", @@ -5752,7 +5794,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" 
], "properties": { - "description": "corelight_dhcp Data Parser with template version 3.2.1", + "description": "corelight_dhcp Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject14').parserVersion14]", @@ -5882,7 +5924,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_dnp3 Data Parser with template version 3.2.1", + "description": "corelight_dnp3 Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject15').parserVersion15]", @@ -6012,7 +6054,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_dns Data Parser with template version 3.2.1", + "description": "corelight_dns Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject16').parserVersion16]", @@ -6142,7 +6184,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_dns_red Data Parser with template version 3.2.1", + "description": "corelight_dns_red Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject17').parserVersion17]", @@ 
-6272,7 +6314,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_dpd Data Parser with template version 3.2.1", + "description": "corelight_dpd Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject18').parserVersion18]", @@ -6402,7 +6444,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_encrypted_dns Data Parser with template version 3.2.1", + "description": "corelight_encrypted_dns Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject19').parserVersion19]", @@ -6532,7 +6574,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_enip Data Parser with template version 3.2.1", + "description": "corelight_enip Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject20').parserVersion20]", @@ -6662,7 +6704,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_enip_debug Data Parser with template version 3.2.1", + "description": "corelight_enip_debug Data 
Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject21').parserVersion21]", @@ -6792,7 +6834,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_enip_list_identity Data Parser with template version 3.2.1", + "description": "corelight_enip_list_identity Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject22').parserVersion22]", @@ -6922,7 +6964,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_etc_viz Data Parser with template version 3.2.1", + "description": "corelight_etc_viz Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject23').parserVersion23]", @@ -7052,7 +7094,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_files Data Parser with template version 3.2.1", + "description": "corelight_files Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject24').parserVersion24]", @@ -7182,7 +7224,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', 
parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_files_red Data Parser with template version 3.2.1", + "description": "corelight_files_red Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject25').parserVersion25]", @@ -7312,7 +7354,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_ftp Data Parser with template version 3.2.1", + "description": "corelight_ftp Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject26').parserVersion26]", @@ -7442,7 +7484,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_generic_dns_tunnels Data Parser with template version 3.2.1", + "description": "corelight_generic_dns_tunnels Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject27').parserVersion27]", @@ -7572,7 +7614,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_generic_icmp_tunnels Data Parser with template version 3.2.1", + "description": "corelight_generic_icmp_tunnels Data Parser with template version 3.2.2", "mainTemplate": { 
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject28').parserVersion28]", @@ -7702,7 +7744,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_http Data Parser with template version 3.2.1", + "description": "corelight_http Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject29').parserVersion29]", @@ -7832,7 +7874,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_http2 Data Parser with template version 3.2.1", + "description": "corelight_http2 Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject30').parserVersion30]", @@ -7962,7 +8004,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_http_red Data Parser with template version 3.2.1", + "description": "corelight_http_red Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject31').parserVersion31]", @@ -8092,7 +8134,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', 
variables('_solutionId'))]" ], "properties": { - "description": "corelight_icmp_specific_tunnels Data Parser with template version 3.2.1", + "description": "corelight_icmp_specific_tunnels Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject32').parserVersion32]", @@ -8222,7 +8264,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_intel Data Parser with template version 3.2.1", + "description": "corelight_intel Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject33').parserVersion33]", @@ -8352,7 +8394,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_ipsec Data Parser with template version 3.2.1", + "description": "corelight_ipsec Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject34').parserVersion34]", @@ -8482,7 +8524,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_irc Data Parser with template version 3.2.1", + "description": "corelight_irc Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", 
"contentVersion": "[variables('parserObject35').parserVersion35]", @@ -8612,7 +8654,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_iso_cotp Data Parser with template version 3.2.1", + "description": "corelight_iso_cotp Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject36').parserVersion36]", @@ -8742,7 +8784,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_kerberos Data Parser with template version 3.2.1", + "description": "corelight_kerberos Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject37').parserVersion37]", @@ -8872,7 +8914,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_known_certs Data Parser with template version 3.2.1", + "description": "corelight_known_certs Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject38').parserVersion38]", @@ -9002,7 +9044,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_known_devices 
Data Parser with template version 3.2.1", + "description": "corelight_known_devices Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject39').parserVersion39]", @@ -9132,7 +9174,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_known_domains Data Parser with template version 3.2.1", + "description": "corelight_known_domains Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject40').parserVersion40]", @@ -9262,7 +9304,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_known_hosts Data Parser with template version 3.2.1", + "description": "corelight_known_hosts Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject41').parserVersion41]", @@ -9392,7 +9434,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_known_names Data Parser with template version 3.2.1", + "description": "corelight_known_names Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject42').parserVersion42]", @@ 
-9522,7 +9564,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_known_remotes Data Parser with template version 3.2.1", + "description": "corelight_known_remotes Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject43').parserVersion43]", @@ -9652,7 +9694,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_known_services Data Parser with template version 3.2.1", + "description": "corelight_known_services Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject44').parserVersion44]", @@ -9782,7 +9824,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_known_users Data Parser with template version 3.2.1", + "description": "corelight_known_users Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject45').parserVersion45]", @@ -9912,7 +9954,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_local_subnets Data Parser with template version 3.2.1", + 
"description": "corelight_local_subnets Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject46').parserVersion46]", @@ -10042,7 +10084,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_local_subnets_dj Data Parser with template version 3.2.1", + "description": "corelight_local_subnets_dj Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject47').parserVersion47]", @@ -10172,7 +10214,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_local_subnets_graphs Data Parser with template version 3.2.1", + "description": "corelight_local_subnets_graphs Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject48').parserVersion48]", @@ -10302,7 +10344,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_log4shell Data Parser with template version 3.2.1", + "description": "corelight_log4shell Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject49').parserVersion49]", @@ -10432,7 +10474,7 @@ 
"[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_modbus Data Parser with template version 3.2.1", + "description": "corelight_modbus Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject50').parserVersion50]", @@ -10562,7 +10604,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_mqtt_connect Data Parser with template version 3.2.1", + "description": "corelight_mqtt_connect Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject51').parserVersion51]", @@ -10692,7 +10734,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_mqtt_publish Data Parser with template version 3.2.1", + "description": "corelight_mqtt_publish Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject52').parserVersion52]", @@ -10822,7 +10864,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_mqtt_subscribe Data Parser with template version 3.2.1", + "description": 
"corelight_mqtt_subscribe Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject53').parserVersion53]", @@ -10952,7 +10994,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_mysql Data Parser with template version 3.2.1", + "description": "corelight_mysql Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject54').parserVersion54]", @@ -11082,7 +11124,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_notice Data Parser with template version 3.2.1", + "description": "corelight_notice Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject55').parserVersion55]", @@ -11212,7 +11254,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_ntlm Data Parser with template version 3.2.1", + "description": "corelight_ntlm Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject56').parserVersion56]", @@ -11342,7 +11384,7 @@ 
"[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_ntp Data Parser with template version 3.2.1", + "description": "corelight_ntp Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject57').parserVersion57]", @@ -11472,7 +11514,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_ocsp Data Parser with template version 3.2.1", + "description": "corelight_ocsp Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject58').parserVersion58]", @@ -11602,7 +11644,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_openflow Data Parser with template version 3.2.1", + "description": "corelight_openflow Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject59').parserVersion59]", @@ -11732,7 +11774,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_packet_filter Data Parser with template version 3.2.1", + "description": "corelight_packet_filter Data Parser with 
template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject60').parserVersion60]", @@ -11862,7 +11904,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_pe Data Parser with template version 3.2.1", + "description": "corelight_pe Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject61').parserVersion61]", @@ -11992,7 +12034,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_profinet Data Parser with template version 3.2.1", + "description": "corelight_profinet Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject62').parserVersion62]", @@ -12122,7 +12164,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_profinet_dce_rpc Data Parser with template version 3.2.1", + "description": "corelight_profinet_dce_rpc Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject63').parserVersion63]", @@ -12252,7 +12294,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', 
parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_profinet_debug Data Parser with template version 3.2.1", + "description": "corelight_profinet_debug Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject64').parserVersion64]", @@ -12382,7 +12424,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_radius Data Parser with template version 3.2.1", + "description": "corelight_radius Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject65').parserVersion65]", @@ -12512,7 +12554,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_rdp Data Parser with template version 3.2.1", + "description": "corelight_rdp Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject66').parserVersion66]", @@ -12642,7 +12684,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_reporter Data Parser with template version 3.2.1", + "description": "corelight_reporter Data Parser with template version 3.2.2", "mainTemplate": { "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject67').parserVersion67]", @@ -12772,7 +12814,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_rfb Data Parser with template version 3.2.1", + "description": "corelight_rfb Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject68').parserVersion68]", @@ -12902,7 +12944,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_s7comm Data Parser with template version 3.2.1", + "description": "corelight_s7comm Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject69').parserVersion69]", @@ -13032,7 +13074,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_signatures Data Parser with template version 3.2.1", + "description": "corelight_signatures Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject70').parserVersion70]", @@ -13162,7 +13204,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', 
variables('_solutionId'))]" ], "properties": { - "description": "corelight_sip Data Parser with template version 3.2.1", + "description": "corelight_sip Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject71').parserVersion71]", @@ -13292,7 +13334,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_smartpcap Data Parser with template version 3.2.1", + "description": "corelight_smartpcap Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject72').parserVersion72]", @@ -13422,7 +13464,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_smartpcap_stats Data Parser with template version 3.2.1", + "description": "corelight_smartpcap_stats Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject73').parserVersion73]", @@ -13552,7 +13594,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_smb_files Data Parser with template version 3.2.1", + "description": "corelight_smb_files Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", 
"contentVersion": "[variables('parserObject74').parserVersion74]", @@ -13682,7 +13724,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_smb_mapping Data Parser with template version 3.2.1", + "description": "corelight_smb_mapping Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject75').parserVersion75]", @@ -13812,7 +13854,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_smtp Data Parser with template version 3.2.1", + "description": "corelight_smtp Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject76').parserVersion76]", @@ -13942,7 +13984,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_smtp_links Data Parser with template version 3.2.1", + "description": "corelight_smtp_links Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject77').parserVersion77]", @@ -14072,7 +14114,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_snmp Data 
Parser with template version 3.2.1", + "description": "corelight_snmp Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject78').parserVersion78]", @@ -14202,7 +14244,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_socks Data Parser with template version 3.2.1", + "description": "corelight_socks Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject79').parserVersion79]", @@ -14332,7 +14374,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_software Data Parser with template version 3.2.1", + "description": "corelight_software Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject80').parserVersion80]", @@ -14462,7 +14504,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_specific_dns_tunnels Data Parser with template version 3.2.1", + "description": "corelight_specific_dns_tunnels Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject81').parserVersion81]", @@ -14592,7 
+14634,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_ssh Data Parser with template version 3.2.1", + "description": "corelight_ssh Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject82').parserVersion82]", @@ -14722,7 +14764,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_ssl Data Parser with template version 3.2.1", + "description": "corelight_ssl Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject83').parserVersion83]", @@ -14852,7 +14894,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_ssl_red Data Parser with template version 3.2.1", + "description": "corelight_ssl_red Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject84').parserVersion84]", @@ -14982,7 +15024,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_stats Data Parser with template version 3.2.1", + "description": "corelight_stats Data Parser with template 
version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject85').parserVersion85]", @@ -15112,7 +15154,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_stepping Data Parser with template version 3.2.1", + "description": "corelight_stepping Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject86').parserVersion86]", @@ -15242,7 +15284,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_stun Data Parser with template version 3.2.1", + "description": "corelight_stun Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject87').parserVersion87]", @@ -15372,7 +15414,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_stun_nat Data Parser with template version 3.2.1", + "description": "corelight_stun_nat Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject88').parserVersion88]", @@ -15502,7 +15544,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 
'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_suricata_corelight Data Parser with template version 3.2.1", + "description": "corelight_suricata_corelight Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject89').parserVersion89]", @@ -15632,7 +15674,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_suricata_eve Data Parser with template version 3.2.1", + "description": "corelight_suricata_eve Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject90').parserVersion90]", @@ -15762,7 +15804,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_suricata_stats Data Parser with template version 3.2.1", + "description": "corelight_suricata_stats Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject91').parserVersion91]", @@ -15892,7 +15934,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_suricata_zeek_stats Data Parser with template version 3.2.1", + "description": "corelight_suricata_zeek_stats Data Parser with template version 3.2.2", "mainTemplate": 
{ "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject92').parserVersion92]", @@ -16022,7 +16064,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_syslog Data Parser with template version 3.2.1", + "description": "corelight_syslog Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject93').parserVersion93]", @@ -16152,7 +16194,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_tds Data Parser with template version 3.2.1", + "description": "corelight_tds Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject94').parserVersion94]", @@ -16282,7 +16324,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_tds_rpc Data Parser with template version 3.2.1", + "description": "corelight_tds_rpc Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject95').parserVersion95]", @@ -16412,7 +16454,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', 
variables('_solutionId'))]" ], "properties": { - "description": "corelight_tds_sql_batch Data Parser with template version 3.2.1", + "description": "corelight_tds_sql_batch Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject96').parserVersion96]", @@ -16542,7 +16584,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_traceroute Data Parser with template version 3.2.1", + "description": "corelight_traceroute Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject97').parserVersion97]", @@ -16672,7 +16714,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_tunnel Data Parser with template version 3.2.1", + "description": "corelight_tunnel Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject98').parserVersion98]", @@ -16802,7 +16844,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_unknown_smartpcap Data Parser with template version 3.2.1", + "description": "corelight_unknown_smartpcap Data Parser with template version 3.2.2", "mainTemplate": { "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject99').parserVersion99]", @@ -16932,7 +16974,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_util_stats Data Parser with template version 3.2.1", + "description": "corelight_util_stats Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject100').parserVersion100]", @@ -17062,7 +17104,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_vpn Data Parser with template version 3.2.1", + "description": "corelight_vpn Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject101').parserVersion101]", @@ -17192,7 +17234,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_weird Data Parser with template version 3.2.1", + "description": "corelight_weird Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject102').parserVersion102]", @@ -17322,7 +17364,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', 
variables('_solutionId'))]" ], "properties": { - "description": "corelight_weird_red Data Parser with template version 3.2.1", + "description": "corelight_weird_red Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject103').parserVersion103]", @@ -17452,7 +17494,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_weird_stats Data Parser with template version 3.2.1", + "description": "corelight_weird_stats Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject104').parserVersion104]", @@ -17582,7 +17624,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_wireguard Data Parser with template version 3.2.1", + "description": "corelight_wireguard Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject105').parserVersion105]", @@ -17712,7 +17754,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_x509 Data Parser with template version 3.2.1", + "description": "corelight_x509 Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", 
"contentVersion": "[variables('parserObject106').parserVersion106]", @@ -17842,7 +17884,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_x509_red Data Parser with template version 3.2.1", + "description": "corelight_x509_red Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject107').parserVersion107]", @@ -17972,7 +18014,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_zeek_doctor Data Parser with template version 3.2.1", + "description": "corelight_zeek_doctor Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject108').parserVersion108]", @@ -18102,7 +18144,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_corelight_metrics_disk Data Parser with template version 3.2.1", + "description": "corelight_corelight_metrics_disk Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject109').parserVersion109]", @@ -18232,7 +18274,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { 
- "description": "corelight_corelight_metrics_iface Data Parser with template version 3.2.1", + "description": "corelight_corelight_metrics_iface Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject110').parserVersion110]", @@ -18362,7 +18404,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_corelight_metrics_memory Data Parser with template version 3.2.1", + "description": "corelight_corelight_metrics_memory Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject111').parserVersion111]", @@ -18492,7 +18534,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_corelight_metrics_system Data Parser with template version 3.2.1", + "description": "corelight_corelight_metrics_system Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject112').parserVersion112]", @@ -18622,7 +18664,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_corelight_metrics_zeek_doctor Data Parser with template version 3.2.1", + "description": "corelight_corelight_metrics_zeek_doctor Data Parser with template version 3.2.2", "mainTemplate": { "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject113').parserVersion113]", @@ -18752,7 +18794,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_suri_aggregations Data Parser with template version 3.2.1", + "description": "corelight_suri_aggregations Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject114').parserVersion114]", @@ -18882,7 +18924,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_anomaly Data Parser with template version 3.2.1", + "description": "corelight_anomaly Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject115').parserVersion115]", @@ -19012,7 +19054,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "corelight_first_seen Data Parser with template version 3.2.1", + "description": "corelight_first_seen Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject116').parserVersion116]", @@ -19136,42 +19178,36 @@ { "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", "apiVersion": "2023-04-01-preview", - 
"name": "[variables('huntingQueryObject1').huntingQueryTemplateSpecName1]", + "name": "[variables('parserObject117').parserTemplateSpecName117]", "location": "[parameters('workspace-location')]", "dependsOn": [ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightAbnormalEmailSubject_HuntingQueries Hunting Query with template version 3.2.1", + "description": "corelight_conn_agg Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "[variables('huntingQueryObject1').huntingQueryVersion1]", + "contentVersion": "[variables('parserObject117').parserVersion117]", "parameters": {}, "variables": {}, "resources": [ { - "type": "Microsoft.OperationalInsights/savedSearches", + "name": "[variables('parserObject117')._parserName117]", "apiVersion": "2022-10-01", - "name": "Corelight_Hunting_Query_1", + "type": "Microsoft.OperationalInsights/workspaces/savedSearches", "location": "[parameters('workspace-location')]", "properties": { "eTag": "*", - "displayName": "Corelight - Abnormal Email Subject", - "category": "Hunting Queries", - "query": "corelight_smtp\n| where subject hasprefix @'\\=?utf-16'\n", + "displayName": "Corelight Connection Aggregated Events", + "category": "Microsoft Sentinel Parser", + "functionAlias": "corelight_conn_agg", + "query": "let ConnStateLookup = datatable(\n conn_state: string,\n conn_state_desc: string,\n action: string\n)[\n \"S0\",\"Connection attempt seen, no reply.\",\"teardown\",\n \"S1\",\"Connection established, not terminated.\",\"allowed\",\n \"SF\",\"Normal establishment and termination.\",\"allowed\",\n \"REJ\",\"Connection attempt rejected.\",\"blocked\",\n \"S2\",\"Connection established and close attempt by originator seen (but no reply from 
responder).\",\"allowed\",\n \"S3\",\"Connection established and close attempt by responder seen (but no reply from originator).\",\"allowed\",\n \"RSTO\",\"Connection established, originator aborted (sent a RST).\",\"allowed\",\n \"RSTR\",\"Established, responder aborted.\",\"allowed\",\n \"RSTOS0\",\"Originator sent a SYN followed by a RST, we never saw a SYN-ACK from the responder.\",\"teardown\",\n \"RSTRH\",\"Responder sent a SYN ACK followed by a RST, we never saw a SYN from the (purported) originator.\",\"teardown\",\n \"SH\",\"Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open).\",\"teardown\",\n \"SHR\",\"Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator.\",\"teardown\",\n \"OTH\",\"No SYN seen, just midstream traffic (a 'partial connection' that was not later closed).\",\"allowed\"\n];\nlet dummy_table = datatable(TimeGenerated: datetime, uid_s: string) [];\nlet corelight_conn_agg = view () {\n union isfuzzy=true\n Corelight_v2_conn_agg_CL,\n dummy_table\n | summarize arg_max(TimeGenerated, *) by uid_s\n | extend\n path=column_ifexists(\"_path_s\", \"\"),\n system_name=column_ifexists(\"_system_name_s\", \"\"),\n id_orig_h=column_ifexists(\"id_orig_h_s\", \"\"),\n id_orig_p=column_ifexists(\"id_orig_p_d\", real(null)),\n id_resp_h=column_ifexists(\"id_resp_h_s\", \"\"),\n id_resp_p=column_ifexists(\"id_resp_p_d\", real(null)),\n proto=column_ifexists(\"proto_s\", \"\"),\n suri_ids=column_ifexists(\"suri_ids_s\", \"\"),\n local_orig=column_ifexists(\"local_orig_b\", \"\"),\n local_resp=column_ifexists(\"local_resp_b\", \"\"),\n id_orig_h_n=column_ifexists(\"id_orig_h_n_s\", \"\"),\n id_resp_h_n=column_ifexists(\"id_resp_h_n_s\", \"\"),\n write_ts=column_ifexists(\"_write_ts_t\", datetime(null)),\n uid=column_ifexists(\"uid_s\", \"\"),\n community_id=column_ifexists(\"community_id_s\",\"\"),\n spcap_url=column_ifexists(\"spcap_url_s\", \"\"),\n 
service=column_ifexists(\"service_s\", \"\"),\n apps=column_ifexists(\"app_s\", \"\"),\n corelight_shunted=column_ifexists(\"corelight_shunted_b\", \"\"),\n duration=column_ifexists(\"duration_d\", real(null)),\n orig_bytes=column_ifexists(\"orig_bytes_d\", real(null)),\n resp_bytes=column_ifexists(\"resp_bytes_d\", real(null)),\n missed_bytes=column_ifexists(\"missed_bytes_d\", real(null)),\n orig_shunted_pkts=column_ifexists(\"orig_shunted_pkts_d\", real(null)),\n orig_shunted_bytes=column_ifexists(\"orig_shunted_bytes_d\", real(null)),\n resp_shunted_pkts=column_ifexists(\"resp_shunted_pkts_d\", real(null)),\n resp_shunted_bytes=column_ifexists(\"resp_shunted_bytes_d\", real(null)),\n orig_pkts=column_ifexists(\"orig_pkts_d\", real(null)),\n orig_ip_bytes=column_ifexists(\"orig_ip_bytes_d\", real(null)),\n resp_pkts=column_ifexists(\"resp_pkts_d\", real(null)),\n resp_ip_bytes=column_ifexists(\"resp_ip_bytes_d\", real(null)),\n conn_state=column_ifexists(\"conn_state_s\", \"\"),\n history=column_ifexists(\"history_s\", \"\"),\n tunnel_parents=column_ifexists(\"tunnel_parents_s\", \"\"),\n netskope_site_id=column_ifexists(\"netskope_site_id_s\", \"\"),\n netskope_user_id=column_ifexists(\"netskope_user_id_s\", \"\"),\n id_vlan=column_ifexists(\"id_vlan_d\", real(null)),\n vlan=column_ifexists(\"vlan_d\", real(null)),\n inner_vlan=column_ifexists(\"inner_vlan_d\", real(null)),\n orig_inst_org_id=column_ifexists(\"orig_inst_org_id_s\", \"\"),\n orig_inst_name=column_ifexists(\"orig_inst_name_s\", \"\"),\n orig_inst_az=column_ifexists(\"orig_inst_az_s\", \"\"),\n orig_inst_vpc_id=column_ifexists(\"orig_inst_vpc_id_s\", \"\"),\n orig_inst_subnet_id=column_ifexists(\"orig_inst_subnet_id_s\", \"\"),\n orig_inst_sg_ids=column_ifexists(\"orig_inst_sg_ids_s\", \"\"),\n orig_inst_project=column_ifexists(\"orig_inst_project_s\", \"\"),\n orig_inst_network=column_ifexists(\"orig_inst_network_s\", \"\"),\n orig_inst_network_tags=column_ifexists(\"orig_inst_network_tags_s\", 
\"\"),\n orig_inst_id=column_ifexists(\"orig_inst_id_s\", \"\"),\n orig_inst_resource_group=column_ifexists(\"orig_inst_resource_group_s\", \"\"),\n orig_inst_subscription=column_ifexists(\"orig_inst_subscription_s\", \"\"),\n orig_inst_os=column_ifexists(\"orig_inst_os_s\", \"\"),\n orig_inst_location=column_ifexists(\"orig_inst_location_s\", \"\"),\n orig_inst_nsg=column_ifexists(\"orig_inst_nsg_s\", \"\"),\n resp_inst_org_id=column_ifexists(\"resp_inst_org_id_s\", \"\"),\n resp_inst_name=column_ifexists(\"resp_inst_name_s\", \"\"),\n resp_inst_az=column_ifexists(\"resp_inst_az_s\", \"\"),\n resp_inst_vpc_id=column_ifexists(\"resp_inst_vpc_id_s\", \"\"),\n resp_inst_subnet_id=column_ifexists(\"resp_inst_subnet_id_s\", \"\"),\n resp_inst_sg_ids=column_ifexists(\"resp_inst_sg_ids_s\", \"\"),\n resp_inst_project=column_ifexists(\"resp_inst_project_s\", \"\"),\n resp_inst_network=column_ifexists(\"resp_inst_network_s\", \"\"),\n resp_inst_network_tags=column_ifexists(\"resp_inst_network_tags_s\", \"\"),\n resp_inst_id=column_ifexists(\"resp_inst_id_s\", \"\"),\n resp_inst_resource_group=column_ifexists(\"resp_inst_resource_group_s\", \"\"),\n resp_inst_subscription=column_ifexists(\"resp_inst_subscription_s\", \"\"),\n resp_inst_os=column_ifexists(\"resp_inst_os_s\", \"\"),\n resp_inst_location=column_ifexists(\"resp_inst_location_s\", \"\"),\n resp_inst_nsg=column_ifexists(\"resp_inst_nsg_s\", \"\")\n | lookup ConnStateLookup on conn_state\n | extend\n EventVendor = \"Corelight\",\n EventProduct = \"CorelightSensor\",\n EventType = \"conn_agg\",\n ts = TimeGenerated,\n src=id_orig_h,\n src_ip=id_orig_h,\n src_port=id_orig_p,\n dest=id_resp_h,\n dest_ip=id_resp_h,\n dest_port=id_resp_p,\n bytes_out=orig_ip_bytes,\n packets_out=orig_pkts,\n bytes_in=resp_ip_bytes,\n packets_in=resp_pkts,\n session_id=uid,\n bytes=resp_ip_bytes + orig_ip_bytes,\n sensor_name = coalesce(system_name, \"unknown\"),\n transport=iff(proto=='icmp' and id_orig_h matches regex \".*:.*\", 
\"icmp6\", proto),\n app=split(service, \",\")\n | extend\n is_dest_internal_ip = iff(local_resp == true, \"true\", \"false\"),\n is_src_internal_ip = iff(local_orig == true, \"true\", \"false\"),\n direction = case(\n local_orig == \"true\" and local_resp == \"true\",\n \"internal\", \n local_orig == \"true\" and local_resp == \"false\",\n \"outbound\", \n local_orig == \"false\" and local_resp == \"false\",\n \"external\", \n local_orig == \"false\" and local_resp == \"true\",\n \"inbound\", \n \"unknown\"\n )\n | project\n TimeGenerated,\n path,\n system_name,\n id_orig_h,\n id_orig_p,\n id_resp_h,\n id_resp_p,\n proto,\n suri_ids,\n local_orig,\n local_resp,\n id_orig_h_n,\n id_resp_h_n,\n write_ts,\n uid,\n community_id,\n spcap_url,\n service,\n app,\n apps,\n corelight_shunted,\n duration,\n orig_bytes,\n resp_bytes,\n missed_bytes,\n orig_shunted_pkts,\n orig_shunted_bytes,\n resp_shunted_pkts,\n resp_shunted_bytes,\n orig_pkts,\n orig_ip_bytes,\n resp_pkts,\n resp_ip_bytes,\n conn_state,\n history,\n tunnel_parents,\n netskope_site_id,\n netskope_user_id,\n id_vlan,\n vlan,\n inner_vlan,\n orig_inst_org_id,\n orig_inst_name,\n orig_inst_az,\n orig_inst_vpc_id,\n orig_inst_subnet_id,\n orig_inst_sg_ids,\n orig_inst_project,\n orig_inst_network,\n orig_inst_network_tags,\n orig_inst_id,\n orig_inst_resource_group,\n orig_inst_subscription,\n orig_inst_os,\n orig_inst_location,\n orig_inst_nsg,\n resp_inst_org_id,\n resp_inst_name,\n resp_inst_az,\n resp_inst_vpc_id,\n resp_inst_subnet_id,\n resp_inst_sg_ids,\n resp_inst_project,\n resp_inst_network,\n resp_inst_network_tags,\n resp_inst_id,\n resp_inst_resource_group,\n resp_inst_subscription,\n resp_inst_os,\n resp_inst_location,\n resp_inst_nsg,\n EventVendor,\n EventProduct,\n EventType,\n ts,\n is_dest_internal_ip,\n is_src_internal_ip,\n direction,\n conn_state_desc,\n action,\n src,\n src_ip,\n src_port,\n dest,\n dest_ip,\n dest_port,\n bytes_out,\n packets_out,\n bytes_in,\n packets_in,\n 
session_id,\n bytes,\n sensor_name,\n transport\n};\ncorelight_conn_agg\n", + "functionParameters": "", "version": 2, "tags": [ { "name": "description", - "value": "Query searches for emails with NON-Ascii characters within the Subject ." - }, - { - "name": "tactics", - "value": "InitialAccess" - }, - { - "name": "techniques", - "value": "T1566" + "value": "" } ] } @@ -19179,16 +19215,18 @@ { "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", "apiVersion": "2022-01-01-preview", - "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('HuntingQuery-', last(split(resourceId('Microsoft.OperationalInsights/savedSearches', variables('huntingQueryObject1')._huntingQuerycontentId1),'/'))))]", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Parser-', last(split(variables('parserObject117')._parserId117,'/'))))]", + "dependsOn": [ + "[variables('parserObject117')._parserId117]" + ], "properties": { - "description": "Corelight Hunting Query 1", - "parentId": "[resourceId('Microsoft.OperationalInsights/savedSearches', variables('huntingQueryObject1')._huntingQuerycontentId1)]", - "contentId": "[variables('huntingQueryObject1')._huntingQuerycontentId1]", - "kind": "HuntingQuery", - "version": "[variables('huntingQueryObject1').huntingQueryVersion1]", + "parentId": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_conn_agg')]", + "contentId": "[variables('parserObject117').parserContentId117]", + "kind": "Parser", + "version": "[variables('parserObject117').parserVersion117]", "source": { - "kind": "Solution", "name": "Corelight", + "kind": "Solution", "sourceId": "[variables('_solutionId')]" }, "author": { @@ -19209,53 +19247,97 @@ "packageName": "[variables('_solutionName')]", "packageId": "[variables('_solutionId')]", "contentSchemaVersion": "3.0.0", - "contentId": "[variables('huntingQueryObject1')._huntingQuerycontentId1]", - "contentKind": 
"HuntingQuery", - "displayName": "Corelight - Abnormal Email Subject", - "contentProductId": "[concat(take(variables('_solutionId'),50),'-','hq','-', uniqueString(concat(variables('_solutionId'),'-','HuntingQuery','-',variables('huntingQueryObject1')._huntingQuerycontentId1,'-', '1.0.0')))]", - "id": "[concat(take(variables('_solutionId'),50),'-','hq','-', uniqueString(concat(variables('_solutionId'),'-','HuntingQuery','-',variables('huntingQueryObject1')._huntingQuerycontentId1,'-', '1.0.0')))]", - "version": "1.0.0" + "contentId": "[variables('parserObject117').parserContentId117]", + "contentKind": "Parser", + "displayName": "Corelight Connection Aggregated Events", + "contentProductId": "[concat(take(variables('_solutionId'),50),'-','pr','-', uniqueString(concat(variables('_solutionId'),'-','Parser','-',variables('parserObject117').parserContentId117,'-', '1.1.0')))]", + "id": "[concat(take(variables('_solutionId'),50),'-','pr','-', uniqueString(concat(variables('_solutionId'),'-','Parser','-',variables('parserObject117').parserContentId117,'-', '1.1.0')))]", + "version": "[variables('parserObject117').parserVersion117]" + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/savedSearches", + "apiVersion": "2022-10-01", + "name": "[variables('parserObject117')._parserName117]", + "location": "[parameters('workspace-location')]", + "properties": { + "eTag": "*", + "displayName": "Corelight Connection Aggregated Events", + "category": "Microsoft Sentinel Parser", + "functionAlias": "corelight_conn_agg", + "query": "let ConnStateLookup = datatable(\n conn_state: string,\n conn_state_desc: string,\n action: string\n)[\n \"S0\",\"Connection attempt seen, no reply.\",\"teardown\",\n \"S1\",\"Connection established, not terminated.\",\"allowed\",\n \"SF\",\"Normal establishment and termination.\",\"allowed\",\n \"REJ\",\"Connection attempt rejected.\",\"blocked\",\n \"S2\",\"Connection established and close attempt by originator seen (but no reply from 
responder).\",\"allowed\",\n \"S3\",\"Connection established and close attempt by responder seen (but no reply from originator).\",\"allowed\",\n \"RSTO\",\"Connection established, originator aborted (sent a RST).\",\"allowed\",\n \"RSTR\",\"Established, responder aborted.\",\"allowed\",\n \"RSTOS0\",\"Originator sent a SYN followed by a RST, we never saw a SYN-ACK from the responder.\",\"teardown\",\n \"RSTRH\",\"Responder sent a SYN ACK followed by a RST, we never saw a SYN from the (purported) originator.\",\"teardown\",\n \"SH\",\"Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open).\",\"teardown\",\n \"SHR\",\"Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator.\",\"teardown\",\n \"OTH\",\"No SYN seen, just midstream traffic (a 'partial connection' that was not later closed).\",\"allowed\"\n];\nlet dummy_table = datatable(TimeGenerated: datetime, uid_s: string) [];\nlet corelight_conn_agg = view () {\n union isfuzzy=true\n Corelight_v2_conn_agg_CL,\n dummy_table\n | summarize arg_max(TimeGenerated, *) by uid_s\n | extend\n path=column_ifexists(\"_path_s\", \"\"),\n system_name=column_ifexists(\"_system_name_s\", \"\"),\n id_orig_h=column_ifexists(\"id_orig_h_s\", \"\"),\n id_orig_p=column_ifexists(\"id_orig_p_d\", real(null)),\n id_resp_h=column_ifexists(\"id_resp_h_s\", \"\"),\n id_resp_p=column_ifexists(\"id_resp_p_d\", real(null)),\n proto=column_ifexists(\"proto_s\", \"\"),\n suri_ids=column_ifexists(\"suri_ids_s\", \"\"),\n local_orig=column_ifexists(\"local_orig_b\", \"\"),\n local_resp=column_ifexists(\"local_resp_b\", \"\"),\n id_orig_h_n=column_ifexists(\"id_orig_h_n_s\", \"\"),\n id_resp_h_n=column_ifexists(\"id_resp_h_n_s\", \"\"),\n write_ts=column_ifexists(\"_write_ts_t\", datetime(null)),\n uid=column_ifexists(\"uid_s\", \"\"),\n community_id=column_ifexists(\"community_id_s\",\"\"),\n spcap_url=column_ifexists(\"spcap_url_s\", \"\"),\n 
service=column_ifexists(\"service_s\", \"\"),\n apps=column_ifexists(\"app_s\", \"\"),\n corelight_shunted=column_ifexists(\"corelight_shunted_b\", \"\"),\n duration=column_ifexists(\"duration_d\", real(null)),\n orig_bytes=column_ifexists(\"orig_bytes_d\", real(null)),\n resp_bytes=column_ifexists(\"resp_bytes_d\", real(null)),\n missed_bytes=column_ifexists(\"missed_bytes_d\", real(null)),\n orig_shunted_pkts=column_ifexists(\"orig_shunted_pkts_d\", real(null)),\n orig_shunted_bytes=column_ifexists(\"orig_shunted_bytes_d\", real(null)),\n resp_shunted_pkts=column_ifexists(\"resp_shunted_pkts_d\", real(null)),\n resp_shunted_bytes=column_ifexists(\"resp_shunted_bytes_d\", real(null)),\n orig_pkts=column_ifexists(\"orig_pkts_d\", real(null)),\n orig_ip_bytes=column_ifexists(\"orig_ip_bytes_d\", real(null)),\n resp_pkts=column_ifexists(\"resp_pkts_d\", real(null)),\n resp_ip_bytes=column_ifexists(\"resp_ip_bytes_d\", real(null)),\n conn_state=column_ifexists(\"conn_state_s\", \"\"),\n history=column_ifexists(\"history_s\", \"\"),\n tunnel_parents=column_ifexists(\"tunnel_parents_s\", \"\"),\n netskope_site_id=column_ifexists(\"netskope_site_id_s\", \"\"),\n netskope_user_id=column_ifexists(\"netskope_user_id_s\", \"\"),\n id_vlan=column_ifexists(\"id_vlan_d\", real(null)),\n vlan=column_ifexists(\"vlan_d\", real(null)),\n inner_vlan=column_ifexists(\"inner_vlan_d\", real(null)),\n orig_inst_org_id=column_ifexists(\"orig_inst_org_id_s\", \"\"),\n orig_inst_name=column_ifexists(\"orig_inst_name_s\", \"\"),\n orig_inst_az=column_ifexists(\"orig_inst_az_s\", \"\"),\n orig_inst_vpc_id=column_ifexists(\"orig_inst_vpc_id_s\", \"\"),\n orig_inst_subnet_id=column_ifexists(\"orig_inst_subnet_id_s\", \"\"),\n orig_inst_sg_ids=column_ifexists(\"orig_inst_sg_ids_s\", \"\"),\n orig_inst_project=column_ifexists(\"orig_inst_project_s\", \"\"),\n orig_inst_network=column_ifexists(\"orig_inst_network_s\", \"\"),\n orig_inst_network_tags=column_ifexists(\"orig_inst_network_tags_s\", 
\"\"),\n orig_inst_id=column_ifexists(\"orig_inst_id_s\", \"\"),\n orig_inst_resource_group=column_ifexists(\"orig_inst_resource_group_s\", \"\"),\n orig_inst_subscription=column_ifexists(\"orig_inst_subscription_s\", \"\"),\n orig_inst_os=column_ifexists(\"orig_inst_os_s\", \"\"),\n orig_inst_location=column_ifexists(\"orig_inst_location_s\", \"\"),\n orig_inst_nsg=column_ifexists(\"orig_inst_nsg_s\", \"\"),\n resp_inst_org_id=column_ifexists(\"resp_inst_org_id_s\", \"\"),\n resp_inst_name=column_ifexists(\"resp_inst_name_s\", \"\"),\n resp_inst_az=column_ifexists(\"resp_inst_az_s\", \"\"),\n resp_inst_vpc_id=column_ifexists(\"resp_inst_vpc_id_s\", \"\"),\n resp_inst_subnet_id=column_ifexists(\"resp_inst_subnet_id_s\", \"\"),\n resp_inst_sg_ids=column_ifexists(\"resp_inst_sg_ids_s\", \"\"),\n resp_inst_project=column_ifexists(\"resp_inst_project_s\", \"\"),\n resp_inst_network=column_ifexists(\"resp_inst_network_s\", \"\"),\n resp_inst_network_tags=column_ifexists(\"resp_inst_network_tags_s\", \"\"),\n resp_inst_id=column_ifexists(\"resp_inst_id_s\", \"\"),\n resp_inst_resource_group=column_ifexists(\"resp_inst_resource_group_s\", \"\"),\n resp_inst_subscription=column_ifexists(\"resp_inst_subscription_s\", \"\"),\n resp_inst_os=column_ifexists(\"resp_inst_os_s\", \"\"),\n resp_inst_location=column_ifexists(\"resp_inst_location_s\", \"\"),\n resp_inst_nsg=column_ifexists(\"resp_inst_nsg_s\", \"\")\n | lookup ConnStateLookup on conn_state\n | extend\n EventVendor = \"Corelight\",\n EventProduct = \"CorelightSensor\",\n EventType = \"conn_agg\",\n ts = TimeGenerated,\n src=id_orig_h,\n src_ip=id_orig_h,\n src_port=id_orig_p,\n dest=id_resp_h,\n dest_ip=id_resp_h,\n dest_port=id_resp_p,\n bytes_out=orig_ip_bytes,\n packets_out=orig_pkts,\n bytes_in=resp_ip_bytes,\n packets_in=resp_pkts,\n session_id=uid,\n bytes=resp_ip_bytes + orig_ip_bytes,\n sensor_name = coalesce(system_name, \"unknown\"),\n transport=iff(proto=='icmp' and id_orig_h matches regex \".*:.*\", 
\"icmp6\", proto),\n app=split(service, \",\")\n | extend\n is_dest_internal_ip = iff(local_resp == true, \"true\", \"false\"),\n is_src_internal_ip = iff(local_orig == true, \"true\", \"false\"),\n direction = case(\n local_orig == \"true\" and local_resp == \"true\",\n \"internal\", \n local_orig == \"true\" and local_resp == \"false\",\n \"outbound\", \n local_orig == \"false\" and local_resp == \"false\",\n \"external\", \n local_orig == \"false\" and local_resp == \"true\",\n \"inbound\", \n \"unknown\"\n )\n | project\n TimeGenerated,\n path,\n system_name,\n id_orig_h,\n id_orig_p,\n id_resp_h,\n id_resp_p,\n proto,\n suri_ids,\n local_orig,\n local_resp,\n id_orig_h_n,\n id_resp_h_n,\n write_ts,\n uid,\n community_id,\n spcap_url,\n service,\n app,\n apps,\n corelight_shunted,\n duration,\n orig_bytes,\n resp_bytes,\n missed_bytes,\n orig_shunted_pkts,\n orig_shunted_bytes,\n resp_shunted_pkts,\n resp_shunted_bytes,\n orig_pkts,\n orig_ip_bytes,\n resp_pkts,\n resp_ip_bytes,\n conn_state,\n history,\n tunnel_parents,\n netskope_site_id,\n netskope_user_id,\n id_vlan,\n vlan,\n inner_vlan,\n orig_inst_org_id,\n orig_inst_name,\n orig_inst_az,\n orig_inst_vpc_id,\n orig_inst_subnet_id,\n orig_inst_sg_ids,\n orig_inst_project,\n orig_inst_network,\n orig_inst_network_tags,\n orig_inst_id,\n orig_inst_resource_group,\n orig_inst_subscription,\n orig_inst_os,\n orig_inst_location,\n orig_inst_nsg,\n resp_inst_org_id,\n resp_inst_name,\n resp_inst_az,\n resp_inst_vpc_id,\n resp_inst_subnet_id,\n resp_inst_sg_ids,\n resp_inst_project,\n resp_inst_network,\n resp_inst_network_tags,\n resp_inst_id,\n resp_inst_resource_group,\n resp_inst_subscription,\n resp_inst_os,\n resp_inst_location,\n resp_inst_nsg,\n EventVendor,\n EventProduct,\n EventType,\n ts,\n is_dest_internal_ip,\n is_src_internal_ip,\n direction,\n conn_state_desc,\n action,\n src,\n src_ip,\n src_port,\n dest,\n dest_ip,\n dest_port,\n bytes_out,\n packets_out,\n bytes_in,\n packets_in,\n 
session_id,\n bytes,\n sensor_name,\n transport\n};\ncorelight_conn_agg\n", + "functionParameters": "", + "version": 2, + "tags": [ + { + "name": "description", + "value": "" + } + ] + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2022-01-01-preview", + "location": "[parameters('workspace-location')]", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Parser-', last(split(variables('parserObject117')._parserId117,'/'))))]", + "dependsOn": [ + "[variables('parserObject117')._parserId117]" + ], + "properties": { + "parentId": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_conn_agg')]", + "contentId": "[variables('parserObject117').parserContentId117]", + "kind": "Parser", + "version": "[variables('parserObject117').parserVersion117]", + "source": { + "kind": "Solution", + "name": "Corelight", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Corelight", + "email": "[variables('_email')]" + }, + "support": { + "name": "Corelight", + "tier": "Partner", + "link": "https://support.corelight.com/" + } } }, { "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", "apiVersion": "2023-04-01-preview", - "name": "[variables('huntingQueryObject2').huntingQueryTemplateSpecName2]", + "name": "[variables('parserObject118').parserTemplateSpecName118]", "location": "[parameters('workspace-location')]", "dependsOn": [ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightCompressedFilesTransferredOverHTTP_HuntingQueries Hunting Query with template version 3.2.1", + "description": "corelight_dns_agg Data Parser with template version 3.2.2", "mainTemplate": { "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "[variables('huntingQueryObject2').huntingQueryVersion2]", + "contentVersion": "[variables('parserObject118').parserVersion118]", "parameters": {}, "variables": {}, "resources": [ { - "type": "Microsoft.OperationalInsights/savedSearches", + "name": "[variables('parserObject118')._parserName118]", "apiVersion": "2022-10-01", - "name": "Corelight_Hunting_Query_2", + "type": "Microsoft.OperationalInsights/workspaces/savedSearches", "location": "[parameters('workspace-location')]", "properties": { "eTag": "*", - "displayName": "Corelight - Compressed Files Transferred over HTTP", - "category": "Hunting Queries", - "query": "corelight_http\n| where TimeGenerated > ago(24h)\n| where method in~ ('POST', 'PUT')\n| where orig_mime_types has_any ('application/vnd.ms-cab-compressed', 'application/warc', 'application/x-7z-compressed', 'application/x-ace', 'application/x-arc', 'application/x-archive', 'application/x-arj', 'application/x-compress', 'application/x-cpio', 'application/x-dmg', 'application/x-eet', 'application/x-gzip', 'application/x-lha', 'application/x-lrzip', 'application/x-lz4', 'application/x-lzma', 'application/x-lzh', 'application/x-lzip', 'application/x-rar', 'application/x-rpm', 'application/x-stuffit', 'application/x-tar', 'application/x-xz', 'application/x-zoo', 'application/zip')\n| summarize count() by id_orig_h\n| top 10 by count_\n", + "displayName": "Corelight DNS Aggregated Events", + "category": "Microsoft Sentinel Parser", + "functionAlias": "corelight_dns_agg", + "query": "let DNSLookup = datatable(\n rcode: int,\n reply_code: string,\n cim_reply_code: string\n )[\n 0,\"NOERROR\",\"No Error\",\n 1,\"FORMERR\",\"FormErr\",\n 2,\"SERVFAIL\",\"ServFail\",\n 3,\"NXDOMAIN\",\"NXDomain\",\n 4,\"NOTIMP\",\"NotImp\",\n 5,\"REFUSED\",\"Refused\"\n ];\nlet dummy_table = datatable(TimeGenerated: datetime, uid_s: string) [];\nlet corelight_dns_agg = view 
() {\n union isfuzzy=true\n Corelight_v2_dns_agg_CL,\n dummy_table\n | summarize arg_max(TimeGenerated, *) by uid_s\n | join kind=leftouter \n ( corelight_conn\n | project uid, local_orig, local_resp \n ) on $left.uid_s == $right.uid\n | project-away uid\n | extend\n path=column_ifexists(\"_path_s\", \"\"),\n system_name=column_ifexists(\"_system_name_s\", \"\"),\n id_orig_h=column_ifexists(\"id_orig_h_s\", \"\"),\n id_orig_p=column_ifexists(\"id_orig_p_d\", real(null)),\n id_resp_h=column_ifexists(\"id_resp_h_s\", \"\"),\n id_resp_p=column_ifexists(\"id_resp_p_d\", real(null)),\n query = column_ifexists(\"query_s\", \"\"),\n qtype = column_ifexists(\"qtype_d\", real(null)),\n answers = column_ifexists(\"answers_s\", \"\"),\n rcode = column_ifexists(\"rcode_d\", long(null)),\n uid=column_ifexists(\"uid_s\", \"\"),\n qtype_name = column_ifexists(\"qtype_name_s\", \"\"),\n rcode_name = column_ifexists(\"rcode_name_s\", \"\"),\n rejected = column_ifexists(\"rejected_b\", \"\"),\n proto=column_ifexists(\"proto_s\", \"\"),\n trans_id = column_ifexists(\"trans_id_d\", \"\"),\n rtt = column_ifexists(\"rtt_d\", real(null)),\n qclass = column_ifexists(\"qclass_d\", real(null)),\n qclass_name = column_ifexists(\"qclass_name_s\", \"\"),\n AA = column_ifexists(\"AA_b\", \"\"),\n RA = column_ifexists(\"RA_b\", \"\"),\n RD = column_ifexists(\"RD_b\", \"\"),\n TC = column_ifexists(\"TC_b\", \"\"),\n TTLs = column_ifexists(\"TTLs_s\", \"\"),\n Z = column_ifexists(\"Z_d\", real(null)),\n icann_domain = column_ifexists(\"icann_domain_s\", \"\"),\n icann_host_subdomain = column_ifexists(\"icann_host_subdomain_s\", \"\"),\n icann_tld = column_ifexists(\"icann_tld_s\", \"\"),\n is_trusted_domain = column_ifexists(\"is_trusted_domain_b\", \"\")\n | extend rcode = toint(rcode)\n | lookup DNSLookup on rcode\n | extend\n EventVendor = \"Corelight\",\n EventProduct = \"CorelightSensor\",\n EventType = \"dns_agg\",\n ts = TimeGenerated,\n dns_flags_authoritative_answer = AA,\n 
dns_flags_recursion_available = RA,\n dns_flags_truncated_response = TC,\n ttl = TTLs,\n src = id_orig_h,\n src_ip = id_orig_h,\n src_port = id_orig_p,\n dest = id_resp_h,\n dest_ip = id_resp_h,\n dest_port = id_resp_p,\n record_class = qclass_name,\n record_type = qtype_name,\n reply_code_id = rcode,\n dns_flags_rejected = rejected,\n duration = rtt,\n response_time = rtt,\n transaction_id = trans_id,\n session_id = uid,\n answer_count = array_length(todynamic(answers)),\n query_count = array_length(todynamic(query)),\n sensor_name = coalesce(system_name, \"unknown\"),\n reply_code = cim_reply_code\n | extend\n is_broadcast =iff(src in(\"0.0.0.0\", \"255.255.255.255\") or dest in(\"255.255.255.255\", \"0.0.0.0\"),\"true\",\"false\"),\n is_dest_internal_ip = iff(local_resp == true, \"true\", \"false\"),\n is_src_internal_ip = iff(local_orig == true, \"true\", \"false\"),\n transport = iff(proto == \"icmp\" and id_orig_h matches regex \".*:.*\", \"icmp6\", proto),\n query_length = strlen(query),\n answer_length = iff(answer_count == 1, strlen(answers), tolong('')),\n message_type = iff(isnotnull(rcode), \"Response\", \"Query\")\n | project\n TimeGenerated,\n path,\n system_name,\n id_orig_h,\n id_orig_p,\n id_resp_h,\n id_resp_p,\n query,\n qtype,\n answers,\n answer_count,\n rcode,\n uid,\n qtype_name,\n rcode_name,\n rejected,\n proto,\n trans_id,\n rtt,\n qclass,\n qclass_name,\n AA,\n RA,\n RD,\n TC,\n TTLs,\n Z,\n icann_domain,\n icann_host_subdomain,\n icann_tld,\n is_trusted_domain,\n dns_flags_authoritative_answer,\n dns_flags_recursion_available,\n dns_flags_truncated_response,\n ttl,\n src,\n src_ip,\n src_port,\n dest,\n dest_ip,\n dest_port,\n record_class,\n record_type,\n reply_code_id,\n dns_flags_rejected,\n duration,\n response_time,\n transaction_id,\n session_id,\n query_count,\n sensor_name,\n reply_code,\n is_broadcast,\n is_dest_internal_ip,\n is_src_internal_ip,\n transport,\n query_length,\n answer_length,\n message_type,\n EventVendor,\n 
EventProduct,\n EventType,\n ts\n};\ncorelight_dns_agg\n", + "functionParameters": "", "version": 2, "tags": [ { "name": "description", - "value": "Query searches for top sources which transferred compressed archives over HTTP." - }, - { - "name": "tactics", - "value": "Exfiltration" - }, - { - "name": "techniques", - "value": "T1560" + "value": "" } ] } @@ -19263,16 +19345,18 @@ { "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", "apiVersion": "2022-01-01-preview", - "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('HuntingQuery-', last(split(resourceId('Microsoft.OperationalInsights/savedSearches', variables('huntingQueryObject2')._huntingQuerycontentId2),'/'))))]", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Parser-', last(split(variables('parserObject118')._parserId118,'/'))))]", + "dependsOn": [ + "[variables('parserObject118')._parserId118]" + ], "properties": { - "description": "Corelight Hunting Query 2", - "parentId": "[resourceId('Microsoft.OperationalInsights/savedSearches', variables('huntingQueryObject2')._huntingQuerycontentId2)]", - "contentId": "[variables('huntingQueryObject2')._huntingQuerycontentId2]", - "kind": "HuntingQuery", - "version": "[variables('huntingQueryObject2').huntingQueryVersion2]", + "parentId": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_dns_agg')]", + "contentId": "[variables('parserObject118').parserContentId118]", + "kind": "Parser", + "version": "[variables('parserObject118').parserVersion118]", "source": { - "kind": "Solution", "name": "Corelight", + "kind": "Solution", "sourceId": "[variables('_solutionId')]" }, "author": { @@ -19293,53 +19377,97 @@ "packageName": "[variables('_solutionName')]", "packageId": "[variables('_solutionId')]", "contentSchemaVersion": "3.0.0", - "contentId": "[variables('huntingQueryObject2')._huntingQuerycontentId2]", - "contentKind": 
"HuntingQuery", - "displayName": "Corelight - Compressed Files Transferred over HTTP", - "contentProductId": "[concat(take(variables('_solutionId'),50),'-','hq','-', uniqueString(concat(variables('_solutionId'),'-','HuntingQuery','-',variables('huntingQueryObject2')._huntingQuerycontentId2,'-', '1.0.0')))]", - "id": "[concat(take(variables('_solutionId'),50),'-','hq','-', uniqueString(concat(variables('_solutionId'),'-','HuntingQuery','-',variables('huntingQueryObject2')._huntingQuerycontentId2,'-', '1.0.0')))]", - "version": "1.0.0" + "contentId": "[variables('parserObject118').parserContentId118]", + "contentKind": "Parser", + "displayName": "Corelight DNS Aggregated Events", + "contentProductId": "[concat(take(variables('_solutionId'),50),'-','pr','-', uniqueString(concat(variables('_solutionId'),'-','Parser','-',variables('parserObject118').parserContentId118,'-', '1.1.0')))]", + "id": "[concat(take(variables('_solutionId'),50),'-','pr','-', uniqueString(concat(variables('_solutionId'),'-','Parser','-',variables('parserObject118').parserContentId118,'-', '1.1.0')))]", + "version": "[variables('parserObject118').parserVersion118]" + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/savedSearches", + "apiVersion": "2022-10-01", + "name": "[variables('parserObject118')._parserName118]", + "location": "[parameters('workspace-location')]", + "properties": { + "eTag": "*", + "displayName": "Corelight DNS Aggregated Events", + "category": "Microsoft Sentinel Parser", + "functionAlias": "corelight_dns_agg", + "query": "let DNSLookup = datatable(\n rcode: int,\n reply_code: string,\n cim_reply_code: string\n )[\n 0,\"NOERROR\",\"No Error\",\n 1,\"FORMERR\",\"FormErr\",\n 2,\"SERVFAIL\",\"ServFail\",\n 3,\"NXDOMAIN\",\"NXDomain\",\n 4,\"NOTIMP\",\"NotImp\",\n 5,\"REFUSED\",\"Refused\"\n ];\nlet dummy_table = datatable(TimeGenerated: datetime, uid_s: string) [];\nlet corelight_dns_agg = view () {\n union isfuzzy=true\n Corelight_v2_dns_agg_CL,\n dummy_table\n 
| summarize arg_max(TimeGenerated, *) by uid_s\n | join kind=leftouter \n ( corelight_conn\n | project uid, local_orig, local_resp \n ) on $left.uid_s == $right.uid\n | project-away uid\n | extend\n path=column_ifexists(\"_path_s\", \"\"),\n system_name=column_ifexists(\"_system_name_s\", \"\"),\n id_orig_h=column_ifexists(\"id_orig_h_s\", \"\"),\n id_orig_p=column_ifexists(\"id_orig_p_d\", real(null)),\n id_resp_h=column_ifexists(\"id_resp_h_s\", \"\"),\n id_resp_p=column_ifexists(\"id_resp_p_d\", real(null)),\n query = column_ifexists(\"query_s\", \"\"),\n qtype = column_ifexists(\"qtype_d\", real(null)),\n answers = column_ifexists(\"answers_s\", \"\"),\n rcode = column_ifexists(\"rcode_d\", long(null)),\n uid=column_ifexists(\"uid_s\", \"\"),\n qtype_name = column_ifexists(\"qtype_name_s\", \"\"),\n rcode_name = column_ifexists(\"rcode_name_s\", \"\"),\n rejected = column_ifexists(\"rejected_b\", \"\"),\n proto=column_ifexists(\"proto_s\", \"\"),\n trans_id = column_ifexists(\"trans_id_d\", \"\"),\n rtt = column_ifexists(\"rtt_d\", real(null)),\n qclass = column_ifexists(\"qclass_d\", real(null)),\n qclass_name = column_ifexists(\"qclass_name_s\", \"\"),\n AA = column_ifexists(\"AA_b\", \"\"),\n RA = column_ifexists(\"RA_b\", \"\"),\n RD = column_ifexists(\"RD_b\", \"\"),\n TC = column_ifexists(\"TC_b\", \"\"),\n TTLs = column_ifexists(\"TTLs_s\", \"\"),\n Z = column_ifexists(\"Z_d\", real(null)),\n icann_domain = column_ifexists(\"icann_domain_s\", \"\"),\n icann_host_subdomain = column_ifexists(\"icann_host_subdomain_s\", \"\"),\n icann_tld = column_ifexists(\"icann_tld_s\", \"\"),\n is_trusted_domain = column_ifexists(\"is_trusted_domain_b\", \"\")\n | extend rcode = toint(rcode)\n | lookup DNSLookup on rcode\n | extend\n EventVendor = \"Corelight\",\n EventProduct = \"CorelightSensor\",\n EventType = \"dns_agg\",\n ts = TimeGenerated,\n dns_flags_authoritative_answer = AA,\n dns_flags_recursion_available = RA,\n dns_flags_truncated_response = TC,\n ttl = 
TTLs,\n src = id_orig_h,\n src_ip = id_orig_h,\n src_port = id_orig_p,\n dest = id_resp_h,\n dest_ip = id_resp_h,\n dest_port = id_resp_p,\n record_class = qclass_name,\n record_type = qtype_name,\n reply_code_id = rcode,\n dns_flags_rejected = rejected,\n duration = rtt,\n response_time = rtt,\n transaction_id = trans_id,\n session_id = uid,\n answer_count = array_length(todynamic(answers)),\n query_count = array_length(todynamic(query)),\n sensor_name = coalesce(system_name, \"unknown\"),\n reply_code = cim_reply_code\n | extend\n is_broadcast =iff(src in(\"0.0.0.0\", \"255.255.255.255\") or dest in(\"255.255.255.255\", \"0.0.0.0\"),\"true\",\"false\"),\n is_dest_internal_ip = iff(local_resp == true, \"true\", \"false\"),\n is_src_internal_ip = iff(local_orig == true, \"true\", \"false\"),\n transport = iff(proto == \"icmp\" and id_orig_h matches regex \".*:.*\", \"icmp6\", proto),\n query_length = strlen(query),\n answer_length = iff(answer_count == 1, strlen(answers), tolong('')),\n message_type = iff(isnotnull(rcode), \"Response\", \"Query\")\n | project\n TimeGenerated,\n path,\n system_name,\n id_orig_h,\n id_orig_p,\n id_resp_h,\n id_resp_p,\n query,\n qtype,\n answers,\n answer_count,\n rcode,\n uid,\n qtype_name,\n rcode_name,\n rejected,\n proto,\n trans_id,\n rtt,\n qclass,\n qclass_name,\n AA,\n RA,\n RD,\n TC,\n TTLs,\n Z,\n icann_domain,\n icann_host_subdomain,\n icann_tld,\n is_trusted_domain,\n dns_flags_authoritative_answer,\n dns_flags_recursion_available,\n dns_flags_truncated_response,\n ttl,\n src,\n src_ip,\n src_port,\n dest,\n dest_ip,\n dest_port,\n record_class,\n record_type,\n reply_code_id,\n dns_flags_rejected,\n duration,\n response_time,\n transaction_id,\n session_id,\n query_count,\n sensor_name,\n reply_code,\n is_broadcast,\n is_dest_internal_ip,\n is_src_internal_ip,\n transport,\n query_length,\n answer_length,\n message_type,\n EventVendor,\n EventProduct,\n EventType,\n ts\n};\ncorelight_dns_agg\n", + "functionParameters": 
"", + "version": 2, + "tags": [ + { + "name": "description", + "value": "" + } + ] + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2022-01-01-preview", + "location": "[parameters('workspace-location')]", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Parser-', last(split(variables('parserObject118')._parserId118,'/'))))]", + "dependsOn": [ + "[variables('parserObject118')._parserId118]" + ], + "properties": { + "parentId": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_dns_agg')]", + "contentId": "[variables('parserObject118').parserContentId118]", + "kind": "Parser", + "version": "[variables('parserObject118').parserVersion118]", + "source": { + "kind": "Solution", + "name": "Corelight", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Corelight", + "email": "[variables('_email')]" + }, + "support": { + "name": "Corelight", + "tier": "Partner", + "link": "https://support.corelight.com/" + } } }, { "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", "apiVersion": "2023-04-01-preview", - "name": "[variables('huntingQueryObject3').huntingQueryTemplateSpecName3]", + "name": "[variables('parserObject119').parserTemplateSpecName119]", "location": "[parameters('workspace-location')]", "dependsOn": [ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightDataTransferedByIp_HuntingQueries Hunting Query with template version 3.2.1", + "description": "corelight_files_agg Data Parser with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "[variables('huntingQueryObject3').huntingQueryVersion3]", + 
"contentVersion": "[variables('parserObject119').parserVersion119]", "parameters": {}, "variables": {}, "resources": [ { - "type": "Microsoft.OperationalInsights/savedSearches", + "name": "[variables('parserObject119')._parserName119]", "apiVersion": "2022-10-01", - "name": "Corelight_Hunting_Query_3", + "type": "Microsoft.OperationalInsights/workspaces/savedSearches", "location": "[parameters('workspace-location')]", "properties": { "eTag": "*", - "displayName": "Corelight - Top sources of data transferred", - "category": "Hunting Queries", - "query": "corelight_http\n| where TimeGenerated > ago(24h)\n| where method in~ ('POST', 'PUT')\n| summarize data_sent = sum(tolong(request_body_len)) by id_orig_h\n| top 10 by data_sent\n", + "displayName": "Corelight Files Aggregated Events", + "category": "Microsoft Sentinel Parser", + "functionAlias": "corelight_files_agg", + "query": "let dummy_table = datatable(TimeGenerated: datetime, uid_s: string) [];\nlet corelight_files_agg = view () {\n union isfuzzy=true\n Corelight_v2_files_agg_CL,\n dummy_table\n | summarize arg_max(TimeGenerated, *) by uid_s\n | join kind=leftouter \n ( corelight_conn\n | project uid, local_orig, local_resp \n ) on $left.uid_s == $right.uid\n | project-away uid\n | extend\n path=column_ifexists(\"_path_s\", \"\"),\n system_name=column_ifexists(\"_system_name_s\", \"\"),\n id_orig_h=column_ifexists(\"id_orig_h_s\", \"\"),\n id_orig_p=column_ifexists(\"id_orig_p_d\", real(null)),\n id_resp_h=column_ifexists(\"id_resp_h_s\", \"\"),\n id_resp_p=column_ifexists(\"id_resp_p_d\", real(null)),\n source = column_ifexists(\"source_s\", \"\"),\n analyzers = column_ifexists(\"analyzers_s\", \"\"),\n filename = column_ifexists(\"filename_s\", \"\"),\n md5 = column_ifexists(\"md5_s\", \"\"),\n is_orig = column_ifexists(\"is_orig_b\", \"\"),\n local_orig = column_ifexists(\"local_orig_b\", \"\"),\n sha1 = column_ifexists(\"sha1_s\", \"\"),\n sha256 = column_ifexists(\"sha256_s\", \"\"),\n fuid = 
column_ifexists(\"fuid_s\", \"\"),\n uid=column_ifexists(\"uid_s\", \"\"),\n parent_fuid = column_ifexists(\"parent_fuid_s\", \"\"),\n mime_type = column_ifexists(\"mime_type_s\", \"\"),\n duration = column_ifexists(\"duration_d\", real(null)),\n seen_bytes = column_ifexists(\"seen_bytes_d\", real(null)),\n total_bytes = column_ifexists(\"total_bytes_d\", real(null)),\n missing_bytes = column_ifexists(\"missing_bytes_d\", real(null)),\n overflow_bytes = column_ifexists(\"overflow_bytes_d\", real(null)),\n timedout = column_ifexists(\"timedout_b\", \"\"),\n depth = column_ifexists(\"depth_d\", real(null))\n | extend\n EventVendor = \"Corelight\",\n EventProduct = \"CorelightSensor\",\n EventType = \"files_agg\",\n ts = TimeGenerated,\n bytes = seen_bytes,\n file_size = total_bytes,\n file_name = filename,\n object = filename,\n src = id_orig_h,\n src_ip = id_orig_h,\n src_port = id_orig_p,\n dest = id_resp_h,\n dest_ip = id_resp_h,\n dest_port = id_resp_p,\n app = source,\n file_hash = coalesce(md5, sha1, sha256, \"unknown\"),\n sensor_name = coalesce(system_name, \"unknown\")\n | extend\n is_broadcast =iff(src in(\"0.0.0.0\", \"255.255.255.255\") or dest in(\"255.255.255.255\", \"0.0.0.0\"),\"true\",\"false\"),\n is_dest_internal_ip = iff(local_resp == true, \"true\", \"false\"),\n is_src_internal_ip = iff(local_orig == true, \"true\", \"false\")\n | project\n TimeGenerated,\n path,\n system_name,\n id_orig_h,\n id_orig_p,\n id_resp_h,\n id_resp_p,\n source,\n analyzers,\n filename,\n md5,\n is_orig,\n local_orig,\n sha1,\n sha256,\n fuid,\n uid,\n parent_fuid,\n mime_type,\n duration,\n seen_bytes,\n total_bytes,\n missing_bytes,\n overflow_bytes,\n timedout,\n depth,\n bytes,\n file_size,\n file_name,\n object,\n src,\n src_ip,\n src_port,\n dest,\n dest_ip,\n dest_port,\n app,\n file_hash,\n sensor_name,\n EventVendor,\n EventProduct,\n EventType,\n ts\n};\ncorelight_files_agg\n", + "functionParameters": "", "version": 2, "tags": [ { "name": "description", - 
"value": "Query searches for top sources by transferred data over period of time." - }, - { - "name": "tactics", - "value": "Exfiltration" - }, - { - "name": "techniques", - "value": "T1560" + "value": "" } ] } @@ -19347,16 +19475,18 @@ { "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", "apiVersion": "2022-01-01-preview", - "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('HuntingQuery-', last(split(resourceId('Microsoft.OperationalInsights/savedSearches', variables('huntingQueryObject3')._huntingQuerycontentId3),'/'))))]", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Parser-', last(split(variables('parserObject119')._parserId119,'/'))))]", + "dependsOn": [ + "[variables('parserObject119')._parserId119]" + ], "properties": { - "description": "Corelight Hunting Query 3", - "parentId": "[resourceId('Microsoft.OperationalInsights/savedSearches', variables('huntingQueryObject3')._huntingQuerycontentId3)]", - "contentId": "[variables('huntingQueryObject3')._huntingQuerycontentId3]", - "kind": "HuntingQuery", - "version": "[variables('huntingQueryObject3').huntingQueryVersion3]", + "parentId": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_files_agg')]", + "contentId": "[variables('parserObject119').parserContentId119]", + "kind": "Parser", + "version": "[variables('parserObject119').parserVersion119]", "source": { - "kind": "Solution", "name": "Corelight", + "kind": "Solution", "sourceId": "[variables('_solutionId')]" }, "author": { @@ -19377,35 +19507,727 @@ "packageName": "[variables('_solutionName')]", "packageId": "[variables('_solutionId')]", "contentSchemaVersion": "3.0.0", - "contentId": "[variables('huntingQueryObject3')._huntingQuerycontentId3]", - "contentKind": "HuntingQuery", - "displayName": "Corelight - Top sources of data transferred", - "contentProductId": 
"[concat(take(variables('_solutionId'),50),'-','hq','-', uniqueString(concat(variables('_solutionId'),'-','HuntingQuery','-',variables('huntingQueryObject3')._huntingQuerycontentId3,'-', '1.0.0')))]", - "id": "[concat(take(variables('_solutionId'),50),'-','hq','-', uniqueString(concat(variables('_solutionId'),'-','HuntingQuery','-',variables('huntingQueryObject3')._huntingQuerycontentId3,'-', '1.0.0')))]", - "version": "1.0.0" + "contentId": "[variables('parserObject119').parserContentId119]", + "contentKind": "Parser", + "displayName": "Corelight Files Aggregated Events", + "contentProductId": "[concat(take(variables('_solutionId'),50),'-','pr','-', uniqueString(concat(variables('_solutionId'),'-','Parser','-',variables('parserObject119').parserContentId119,'-', '1.1.0')))]", + "id": "[concat(take(variables('_solutionId'),50),'-','pr','-', uniqueString(concat(variables('_solutionId'),'-','Parser','-',variables('parserObject119').parserContentId119,'-', '1.1.0')))]", + "version": "[variables('parserObject119').parserVersion119]" } }, { - "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", - "apiVersion": "2023-04-01-preview", - "name": "[variables('huntingQueryObject4').huntingQueryTemplateSpecName4]", + "type": "Microsoft.OperationalInsights/workspaces/savedSearches", + "apiVersion": "2022-10-01", + "name": "[variables('parserObject119')._parserName119]", "location": "[parameters('workspace-location')]", - "dependsOn": [ - "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" - ], "properties": { - "description": "CorelightExternalServices_HuntingQueries Hunting Query with template version 3.2.1", - "mainTemplate": { - "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "[variables('huntingQueryObject4').huntingQueryVersion4]", - "parameters": {}, - 
"variables": {}, - "resources": [ - { - "type": "Microsoft.OperationalInsights/savedSearches", - "apiVersion": "2022-10-01", - "name": "Corelight_Hunting_Query_4", - "location": "[parameters('workspace-location')]", + "eTag": "*", + "displayName": "Corelight Files Aggregated Events", + "category": "Microsoft Sentinel Parser", + "functionAlias": "corelight_files_agg", + "query": "let dummy_table = datatable(TimeGenerated: datetime, uid_s: string) [];\nlet corelight_files_agg = view () {\n union isfuzzy=true\n Corelight_v2_files_agg_CL,\n dummy_table\n | summarize arg_max(TimeGenerated, *) by uid_s\n | join kind=leftouter \n ( corelight_conn\n | project uid, local_orig, local_resp \n ) on $left.uid_s == $right.uid\n | project-away uid\n | extend\n path=column_ifexists(\"_path_s\", \"\"),\n system_name=column_ifexists(\"_system_name_s\", \"\"),\n id_orig_h=column_ifexists(\"id_orig_h_s\", \"\"),\n id_orig_p=column_ifexists(\"id_orig_p_d\", real(null)),\n id_resp_h=column_ifexists(\"id_resp_h_s\", \"\"),\n id_resp_p=column_ifexists(\"id_resp_p_d\", real(null)),\n source = column_ifexists(\"source_s\", \"\"),\n analyzers = column_ifexists(\"analyzers_s\", \"\"),\n filename = column_ifexists(\"filename_s\", \"\"),\n md5 = column_ifexists(\"md5_s\", \"\"),\n is_orig = column_ifexists(\"is_orig_b\", \"\"),\n local_orig = column_ifexists(\"local_orig_b\", \"\"),\n sha1 = column_ifexists(\"sha1_s\", \"\"),\n sha256 = column_ifexists(\"sha256_s\", \"\"),\n fuid = column_ifexists(\"fuid_s\", \"\"),\n uid=column_ifexists(\"uid_s\", \"\"),\n parent_fuid = column_ifexists(\"parent_fuid_s\", \"\"),\n mime_type = column_ifexists(\"mime_type_s\", \"\"),\n duration = column_ifexists(\"duration_d\", real(null)),\n seen_bytes = column_ifexists(\"seen_bytes_d\", real(null)),\n total_bytes = column_ifexists(\"total_bytes_d\", real(null)),\n missing_bytes = column_ifexists(\"missing_bytes_d\", real(null)),\n overflow_bytes = column_ifexists(\"overflow_bytes_d\", real(null)),\n timedout = 
column_ifexists(\"timedout_b\", \"\"),\n depth = column_ifexists(\"depth_d\", real(null))\n | extend\n EventVendor = \"Corelight\",\n EventProduct = \"CorelightSensor\",\n EventType = \"files_agg\",\n ts = TimeGenerated,\n bytes = seen_bytes,\n file_size = total_bytes,\n file_name = filename,\n object = filename,\n src = id_orig_h,\n src_ip = id_orig_h,\n src_port = id_orig_p,\n dest = id_resp_h,\n dest_ip = id_resp_h,\n dest_port = id_resp_p,\n app = source,\n file_hash = coalesce(md5, sha1, sha256, \"unknown\"),\n sensor_name = coalesce(system_name, \"unknown\")\n | extend\n is_broadcast =iff(src in(\"0.0.0.0\", \"255.255.255.255\") or dest in(\"255.255.255.255\", \"0.0.0.0\"),\"true\",\"false\"),\n is_dest_internal_ip = iff(local_resp == true, \"true\", \"false\"),\n is_src_internal_ip = iff(local_orig == true, \"true\", \"false\")\n | project\n TimeGenerated,\n path,\n system_name,\n id_orig_h,\n id_orig_p,\n id_resp_h,\n id_resp_p,\n source,\n analyzers,\n filename,\n md5,\n is_orig,\n local_orig,\n sha1,\n sha256,\n fuid,\n uid,\n parent_fuid,\n mime_type,\n duration,\n seen_bytes,\n total_bytes,\n missing_bytes,\n overflow_bytes,\n timedout,\n depth,\n bytes,\n file_size,\n file_name,\n object,\n src,\n src_ip,\n src_port,\n dest,\n dest_ip,\n dest_port,\n app,\n file_hash,\n sensor_name,\n EventVendor,\n EventProduct,\n EventType,\n ts\n};\ncorelight_files_agg\n", + "functionParameters": "", + "version": 2, + "tags": [ + { + "name": "description", + "value": "" + } + ] + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2022-01-01-preview", + "location": "[parameters('workspace-location')]", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Parser-', last(split(variables('parserObject119')._parserId119,'/'))))]", + "dependsOn": [ + "[variables('parserObject119')._parserId119]" + ], + "properties": { + "parentId": 
"[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_files_agg')]", + "contentId": "[variables('parserObject119').parserContentId119]", + "kind": "Parser", + "version": "[variables('parserObject119').parserVersion119]", + "source": { + "kind": "Solution", + "name": "Corelight", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Corelight", + "email": "[variables('_email')]" + }, + "support": { + "name": "Corelight", + "tier": "Partner", + "link": "https://support.corelight.com/" + } + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", + "apiVersion": "2023-04-01-preview", + "name": "[variables('parserObject120').parserTemplateSpecName120]", + "location": "[parameters('workspace-location')]", + "dependsOn": [ + "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" + ], + "properties": { + "description": "corelight_http_agg Data Parser with template version 3.2.2", + "mainTemplate": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "[variables('parserObject120').parserVersion120]", + "parameters": {}, + "variables": {}, + "resources": [ + { + "name": "[variables('parserObject120')._parserName120]", + "apiVersion": "2022-10-01", + "type": "Microsoft.OperationalInsights/workspaces/savedSearches", + "location": "[parameters('workspace-location')]", + "properties": { + "eTag": "*", + "displayName": "Corelight HTTP Aggregated Events", + "category": "Microsoft Sentinel Parser", + "functionAlias": "corelight_http_agg", + "query": "let StatusLookup = datatable(\n status: string,\n action: string\n )[\n \"success\",\"allowed\",\n \"failure\",\"blocked\",\n \"200\",\"success\",\n \"204\",\"success\",\n \"206\",\"success\",\n \"207\",\"success\",\n \"301\",\"success\",\n 
\"302\",\"success\",\n \"303\",\"success\",\n \"304\",\"success\",\n \"307\",\"success\",\n \"400\",\"failure\",\n \"401\",\"failure\",\n \"403\",\"failure\",\n \"404\",\"failure\",\n \"408\",\"failure\",\n \"500\",\"failure\",\n \"503\",\"failure\",\n \"504\",\"failure\"\n ];\n let dummy_table = datatable(TimeGenerated: datetime, uid_s: string) [];\n let corelight_http_agg = view () {\n union isfuzzy=true\n Corelight_v2_http_agg_CL,\n dummy_table\n | summarize arg_max(TimeGenerated, *) by uid_s\n | join kind=leftouter \n ( corelight_conn\n | project uid, local_orig, local_resp \n ) on $left.uid_s == $right.uid\n | project-away uid\n | extend\n path=column_ifexists(\"_path_s\", \"\"),\n system_name=column_ifexists(\"_system_name_s\", \"\"),\n id_orig_h=column_ifexists(\"id_orig_h_s\", \"\"),\n id_orig_p=column_ifexists(\"id_orig_p_d\", real(null)),\n id_resp_h=column_ifexists(\"id_resp_h_s\", \"\"),\n id_resp_p=column_ifexists(\"id_resp_p_d\", real(null)),\n method = column_ifexists(\"method_s\", \"\"),\n host = column_ifexists(\"host_s\", \"\"),\n uid = column_ifexists(\"uid_s\", \"\"),\n uri = column_ifexists(\"uri_s\", \"\"),\n referrer = column_ifexists(\"referrer_s\", \"\"),\n status_code = column_ifexists(\"status_code_d\", real(null)),\n status_msg = column_ifexists(\"status_msg_s\", \"\"),\n request_body_len = column_ifexists(\"request_body_len_d\", real(null)),\n response_body_len = column_ifexists(\"response_body_len_d\", real(null)),\n tags = column_ifexists(\"tags_s\", \"\"),\n orig_mime_types = column_ifexists(\"orig_mime_types_s\", \"\"),\n resp_mime_types = column_ifexists(\"resp_mime_types_s\", \"\"),\n post_body = column_ifexists(\"post_body_s\", \"\"),\n orig_fuids = column_ifexists(\"orig_fuids_s\", \"\"),\n orig_filenames = column_ifexists(\"orig_filenames_s\", \"\"),\n resp_fuids = column_ifexists(\"resp_fuids_s\", \"\"),\n resp_filenames = column_ifexists(\"resp_filenames_s\", \"\"),\n version = column_ifexists(\"version_s\", \"\"),\n 
user_agent = column_ifexists(\"user_agent_s\", \"\"),\n username = column_ifexists(\"username_s\", \"\"),\n pwd = column_ifexists(\"password_s\", \"\"),\n proxied = column_ifexists(\"proxied_s\", \"\"),\n origin = column_ifexists(\"origin_s\", \"\"),\n info_code = column_ifexists(\"info_code_d\", real(null)),\n info_msg = column_ifexists(\"info_msg_s\", \"\")\n | extend status_code = tostring(toint(status_code))\n | lookup StatusLookup on $left.status_code == $right.status\n | extend\n EventVendor = \"Corelight\",\n EventProduct = \"CorelightSensor\",\n EventType = \"http_agg\",\n ts = TimeGenerated,\n dest_host = host,\n src = id_orig_h,\n src_ip = id_orig_h,\n src_port = id_orig_p,\n dest = id_resp_h,\n dest_ip = id_resp_h,\n dest_port = id_resp_p,\n http_method = method,\n bytes_in = request_body_len,\n bytes_out = response_body_len,\n status = status_code,\n vendor_action = status_msg,\n uri_path = uri,\n object = resp_filenames,\n http_user_agent = user_agent,\n http_referrer = referrer,\n http_content_type = orig_mime_types,\n sensor_name = coalesce(system_name, \"unknown\"),\n http_version = version,\n http_username = username\n | extend\n http_user_agent_length = strlen(http_user_agent),\n bytes = bytes_in + bytes_out,\n is_broadcast =iff(src in(\"0.0.0.0\", \"255.255.255.255\") or dest in(\"255.255.255.255\", \"0.0.0.0\"),\"true\",\"false\"),\n is_dest_internal_ip = iff(local_resp == true, \"true\", \"false\"),\n is_src_internal_ip = iff(local_orig == true, \"true\", \"false\"),\n host_header=dest_host,\n referrer_domain_domain=parse_url(referrer).Host,\n referrer_domain_ip=strcat(parse_url(referrer).Host, \":\", parse_url(referrer).Port),\n direction=case(local_orig==\"true\" and local_resp==\"true\", \"internal\", local_orig==\"true\" and local_resp==\"false\", \"outbound\", local_orig==\"false\" and local_resp==\"false\", \"external\", local_orig==\"false\" and local_resp==\"true\", \"inbound\", \"unknown\")\n | extend \n url = 
strcat(\"http://\",host_header,uri),\n url_domain = host_header\n | project\n system_name,\n path,\n id_orig_h,\n id_orig_p,\n id_resp_h,\n id_resp_p,\n method,\n host,\n uid,\n uri,\n referrer,\n status_code,\n status_msg,\n request_body_len,\n response_body_len,\n tags,\n orig_mime_types,\n resp_mime_types,\n post_body,\n orig_fuids,\n orig_filenames,\n resp_fuids,\n resp_filenames,\n version,\n user_agent,\n username,\n pwd,\n proxied,\n origin,\n info_code,\n info_msg,\n EventVendor,\n EventProduct,\n EventType,\n ts,\n dest_host,\n src,\n src_ip,\n src_port,\n dest,\n dest_ip,\n dest_port,\n http_method,\n bytes_in,\n bytes_out,\n status,\n vendor_action,\n uri_path,\n object,\n http_user_agent,\n http_referrer,\n http_content_type,\n sensor_name,\n http_version,\n http_username,\n http_user_agent_length,\n bytes,\n is_broadcast,\n is_dest_internal_ip,\n is_src_internal_ip,\n host_header,\n referrer_domain_domain,\n referrer_domain_ip,\n direction,\n url,\n url_domain,\n TimeGenerated\n };\n corelight_http_agg\n", + "functionParameters": "", + "version": 2, + "tags": [ + { + "name": "description", + "value": "" + } + ] + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2022-01-01-preview", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Parser-', last(split(variables('parserObject120')._parserId120,'/'))))]", + "dependsOn": [ + "[variables('parserObject120')._parserId120]" + ], + "properties": { + "parentId": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_http_agg')]", + "contentId": "[variables('parserObject120').parserContentId120]", + "kind": "Parser", + "version": "[variables('parserObject120').parserVersion120]", + "source": { + "name": "Corelight", + "kind": "Solution", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Corelight", + "email": "[variables('_email')]" + }, + "support": { + 
"name": "Corelight", + "tier": "Partner", + "link": "https://support.corelight.com/" + } + } + } + ] + }, + "packageKind": "Solution", + "packageVersion": "[variables('_solutionVersion')]", + "packageName": "[variables('_solutionName')]", + "packageId": "[variables('_solutionId')]", + "contentSchemaVersion": "3.0.0", + "contentId": "[variables('parserObject120').parserContentId120]", + "contentKind": "Parser", + "displayName": "Corelight HTTP Aggregated Events", + "contentProductId": "[concat(take(variables('_solutionId'),50),'-','pr','-', uniqueString(concat(variables('_solutionId'),'-','Parser','-',variables('parserObject120').parserContentId120,'-', '1.1.0')))]", + "id": "[concat(take(variables('_solutionId'),50),'-','pr','-', uniqueString(concat(variables('_solutionId'),'-','Parser','-',variables('parserObject120').parserContentId120,'-', '1.1.0')))]", + "version": "[variables('parserObject120').parserVersion120]" + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/savedSearches", + "apiVersion": "2022-10-01", + "name": "[variables('parserObject120')._parserName120]", + "location": "[parameters('workspace-location')]", + "properties": { + "eTag": "*", + "displayName": "Corelight HTTP Aggregated Events", + "category": "Microsoft Sentinel Parser", + "functionAlias": "corelight_http_agg", + "query": "let StatusLookup = datatable(\n status: string,\n action: string\n )[\n \"success\",\"allowed\",\n \"failure\",\"blocked\",\n \"200\",\"success\",\n \"204\",\"success\",\n \"206\",\"success\",\n \"207\",\"success\",\n \"301\",\"success\",\n \"302\",\"success\",\n \"303\",\"success\",\n \"304\",\"success\",\n \"307\",\"success\",\n \"400\",\"failure\",\n \"401\",\"failure\",\n \"403\",\"failure\",\n \"404\",\"failure\",\n \"408\",\"failure\",\n \"500\",\"failure\",\n \"503\",\"failure\",\n \"504\",\"failure\"\n ];\n let dummy_table = datatable(TimeGenerated: datetime, uid_s: string) [];\n let corelight_http_agg = view () {\n union isfuzzy=true\n 
Corelight_v2_http_agg_CL,\n dummy_table\n | summarize arg_max(TimeGenerated, *) by uid_s\n | join kind=leftouter \n ( corelight_conn\n | project uid, local_orig, local_resp \n ) on $left.uid_s == $right.uid\n | project-away uid\n | extend\n path=column_ifexists(\"_path_s\", \"\"),\n system_name=column_ifexists(\"_system_name_s\", \"\"),\n id_orig_h=column_ifexists(\"id_orig_h_s\", \"\"),\n id_orig_p=column_ifexists(\"id_orig_p_d\", real(null)),\n id_resp_h=column_ifexists(\"id_resp_h_s\", \"\"),\n id_resp_p=column_ifexists(\"id_resp_p_d\", real(null)),\n method = column_ifexists(\"method_s\", \"\"),\n host = column_ifexists(\"host_s\", \"\"),\n uid = column_ifexists(\"uid_s\", \"\"),\n uri = column_ifexists(\"uri_s\", \"\"),\n referrer = column_ifexists(\"referrer_s\", \"\"),\n status_code = column_ifexists(\"status_code_d\", real(null)),\n status_msg = column_ifexists(\"status_msg_s\", \"\"),\n request_body_len = column_ifexists(\"request_body_len_d\", real(null)),\n response_body_len = column_ifexists(\"response_body_len_d\", real(null)),\n tags = column_ifexists(\"tags_s\", \"\"),\n orig_mime_types = column_ifexists(\"orig_mime_types_s\", \"\"),\n resp_mime_types = column_ifexists(\"resp_mime_types_s\", \"\"),\n post_body = column_ifexists(\"post_body_s\", \"\"),\n orig_fuids = column_ifexists(\"orig_fuids_s\", \"\"),\n orig_filenames = column_ifexists(\"orig_filenames_s\", \"\"),\n resp_fuids = column_ifexists(\"resp_fuids_s\", \"\"),\n resp_filenames = column_ifexists(\"resp_filenames_s\", \"\"),\n version = column_ifexists(\"version_s\", \"\"),\n user_agent = column_ifexists(\"user_agent_s\", \"\"),\n username = column_ifexists(\"username_s\", \"\"),\n pwd = column_ifexists(\"password_s\", \"\"),\n proxied = column_ifexists(\"proxied_s\", \"\"),\n origin = column_ifexists(\"origin_s\", \"\"),\n info_code = column_ifexists(\"info_code_d\", real(null)),\n info_msg = column_ifexists(\"info_msg_s\", \"\")\n | extend status_code = tostring(toint(status_code))\n | 
lookup StatusLookup on $left.status_code == $right.status\n | extend\n EventVendor = \"Corelight\",\n EventProduct = \"CorelightSensor\",\n EventType = \"http_agg\",\n ts = TimeGenerated,\n dest_host = host,\n src = id_orig_h,\n src_ip = id_orig_h,\n src_port = id_orig_p,\n dest = id_resp_h,\n dest_ip = id_resp_h,\n dest_port = id_resp_p,\n http_method = method,\n bytes_in = request_body_len,\n bytes_out = response_body_len,\n status = status_code,\n vendor_action = status_msg,\n uri_path = uri,\n object = resp_filenames,\n http_user_agent = user_agent,\n http_referrer = referrer,\n http_content_type = orig_mime_types,\n sensor_name = coalesce(system_name, \"unknown\"),\n http_version = version,\n http_username = username\n | extend\n http_user_agent_length = strlen(http_user_agent),\n bytes = bytes_in + bytes_out,\n is_broadcast =iff(src in(\"0.0.0.0\", \"255.255.255.255\") or dest in(\"255.255.255.255\", \"0.0.0.0\"),\"true\",\"false\"),\n is_dest_internal_ip = iff(local_resp == true, \"true\", \"false\"),\n is_src_internal_ip = iff(local_orig == true, \"true\", \"false\"),\n host_header=dest_host,\n referrer_domain_domain=parse_url(referrer).Host,\n referrer_domain_ip=strcat(parse_url(referrer).Host, \":\", parse_url(referrer).Port),\n direction=case(local_orig==\"true\" and local_resp==\"true\", \"internal\", local_orig==\"true\" and local_resp==\"false\", \"outbound\", local_orig==\"false\" and local_resp==\"false\", \"external\", local_orig==\"false\" and local_resp==\"true\", \"inbound\", \"unknown\")\n | extend \n url = strcat(\"http://\",host_header,uri),\n url_domain = host_header\n | project\n system_name,\n path,\n id_orig_h,\n id_orig_p,\n id_resp_h,\n id_resp_p,\n method,\n host,\n uid,\n uri,\n referrer,\n status_code,\n status_msg,\n request_body_len,\n response_body_len,\n tags,\n orig_mime_types,\n resp_mime_types,\n post_body,\n orig_fuids,\n orig_filenames,\n resp_fuids,\n resp_filenames,\n version,\n user_agent,\n username,\n pwd,\n proxied,\n 
origin,\n info_code,\n info_msg,\n EventVendor,\n EventProduct,\n EventType,\n ts,\n dest_host,\n src,\n src_ip,\n src_port,\n dest,\n dest_ip,\n dest_port,\n http_method,\n bytes_in,\n bytes_out,\n status,\n vendor_action,\n uri_path,\n object,\n http_user_agent,\n http_referrer,\n http_content_type,\n sensor_name,\n http_version,\n http_username,\n http_user_agent_length,\n bytes,\n is_broadcast,\n is_dest_internal_ip,\n is_src_internal_ip,\n host_header,\n referrer_domain_domain,\n referrer_domain_ip,\n direction,\n url,\n url_domain,\n TimeGenerated\n };\n corelight_http_agg\n", + "functionParameters": "", + "version": 2, + "tags": [ + { + "name": "description", + "value": "" + } + ] + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2022-01-01-preview", + "location": "[parameters('workspace-location')]", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Parser-', last(split(variables('parserObject120')._parserId120,'/'))))]", + "dependsOn": [ + "[variables('parserObject120')._parserId120]" + ], + "properties": { + "parentId": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_http_agg')]", + "contentId": "[variables('parserObject120').parserContentId120]", + "kind": "Parser", + "version": "[variables('parserObject120').parserVersion120]", + "source": { + "kind": "Solution", + "name": "Corelight", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Corelight", + "email": "[variables('_email')]" + }, + "support": { + "name": "Corelight", + "tier": "Partner", + "link": "https://support.corelight.com/" + } + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", + "apiVersion": "2023-04-01-preview", + "name": "[variables('parserObject121').parserTemplateSpecName121]", + "location": "[parameters('workspace-location')]", + "dependsOn": [ + 
"[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" + ], + "properties": { + "description": "corelight_ssl_agg Data Parser with template version 3.2.2", + "mainTemplate": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "[variables('parserObject121').parserVersion121]", + "parameters": {}, + "variables": {}, + "resources": [ + { + "name": "[variables('parserObject121')._parserName121]", + "apiVersion": "2022-10-01", + "type": "Microsoft.OperationalInsights/workspaces/savedSearches", + "location": "[parameters('workspace-location')]", + "properties": { + "eTag": "*", + "displayName": "Corelight SSL Aggregated Events", + "category": "Microsoft Sentinel Parser", + "functionAlias": "corelight_ssl_agg", + "query": "let dummy_table = datatable(TimeGenerated: datetime, uid_s: string) [];\nlet corelight_ssl_agg = view () {\n union isfuzzy=true\n Corelight_v2_ssl_agg_CL,\n dummy_table\n | summarize arg_max(TimeGenerated, *) by uid_s\n | join kind=leftouter \n ( corelight_conn\n | project uid, local_orig, local_resp \n ) on $left.uid_s == $right.uid\n | project-away uid\n | extend\n path=column_ifexists(\"_path_s\", \"\"),\n system_name=column_ifexists(\"_system_name_s\", \"\"),\n uid=column_ifexists(\"uid_s\", \"\"),\n id_orig_h=column_ifexists(\"id_orig_h_s\", \"\"),\n id_orig_p=column_ifexists(\"id_orig_p_d\", real(null)),\n id_resp_h=column_ifexists(\"id_resp_h_s\", \"\"),\n id_resp_p=column_ifexists(\"id_resp_p_d\", real(null)),\n version=column_ifexists(\"version_s\", \"\"),\n cipher=column_ifexists(\"cipher_s\", \"\"),\n curve=column_ifexists(\"curve_s\", \"\"),\n established=column_ifexists(\"established_b\", \"\"),\n server_name=column_ifexists(\"server_name_s\", \"\"),\n next_protocol=column_ifexists(\"next_protocol_s\", \"\"),\n 
ssl_history=column_ifexists(\"ssl_history_s\", \"\"),\n cert_chain_fps=column_ifexists(\"cert_chain_fps_s\", \"\"),\n client_cert_chain_fps=column_ifexists(\"client_cert_chain_fps_s\", \"\"),\n validation_status=column_ifexists(\"validation_status_s\", \"\"),\n ja3=column_ifexists(\"ja3_s\", \"\"),\n ja3s=column_ifexists(\"ja3s_s\", \"\"),\n resumed=column_ifexists(\"resumed_b\", \"\"),\n sni_matches_cert=column_ifexists(\"sni_matches_cert_b\", \"\")\n | extend\n EventVendor=\"Corelight\",\n EventProduct=\"CorelightSensor\",\n EventType=\"ssl_agg\",\n ts = TimeGenerated,\n src=id_orig_h,\n src_ip=id_orig_h,\n src_port=id_orig_p,\n dest=id_resp_h,\n dest_ip=id_resp_h,\n dest_port=id_resp_p,\n ssl_cipher=cipher,\n ssl_curve=curve,\n ssl_subject_common_name=server_name,\n fingerprint=cert_chain_fps,\n is_self_signed = iff(validation_status==\"self signed certificate\", \"yes\", \"no\"),\n action = iff(established==\"true\",\"success\",\"failure\"),\n sensor_name = coalesce(system_name, \"unknown\"),\n signature=validation_status,\n ssl_version = version\n | extend\n is_broadcast = iff(src in(\"0.0.0.0\", \"255.255.255.255\") or dest in(\"255.255.255.255\", \"0.0.0.0\"),\"true\",\"false\"),\n is_src_internal_ip = iff(local_orig == true, \"true\", \"false\"),\n is_dest_internal_ip = iff(local_resp == true, \"true\", \"false\"),\n direction=case(local_orig==\"true\" and local_resp==\"true\", \"internal\", local_orig==\"true\" and local_resp==\"false\", \"outbound\", local_orig==\"false\" and local_resp==\"false\", \"external\", local_orig==\"false\" and local_resp==\"true\", \"inbound\", \"unknown\")\n | project\n path,\n system_name,\n uid,\n id_orig_h,\n id_orig_p,\n id_resp_h,\n id_resp_p,\n version,\n cipher,\n curve,\n established,\n server_name,\n next_protocol,\n ssl_history,\n cert_chain_fps,\n client_cert_chain_fps,\n validation_status,\n ja3,\n ja3s,\n resumed,\n sni_matches_cert,\n EventVendor,\n EventProduct,\n EventType,\n ts,\n src,\n src_ip,\n src_port,\n 
dest,\n dest_ip,\n dest_port,\n ssl_cipher,\n ssl_curve,\n ssl_subject_common_name,\n fingerprint,\n is_self_signed,\n action,\n sensor_name,\n signature,\n ssl_version,\n is_broadcast,\n is_src_internal_ip,\n is_dest_internal_ip,\n direction,\n TimeGenerated\n};\ncorelight_ssl_agg\n", + "functionParameters": "", + "version": 2, + "tags": [ + { + "name": "description", + "value": "" + } + ] + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2022-01-01-preview", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Parser-', last(split(variables('parserObject121')._parserId121,'/'))))]", + "dependsOn": [ + "[variables('parserObject121')._parserId121]" + ], + "properties": { + "parentId": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_ssl_agg')]", + "contentId": "[variables('parserObject121').parserContentId121]", + "kind": "Parser", + "version": "[variables('parserObject121').parserVersion121]", + "source": { + "name": "Corelight", + "kind": "Solution", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Corelight", + "email": "[variables('_email')]" + }, + "support": { + "name": "Corelight", + "tier": "Partner", + "link": "https://support.corelight.com/" + } + } + } + ] + }, + "packageKind": "Solution", + "packageVersion": "[variables('_solutionVersion')]", + "packageName": "[variables('_solutionName')]", + "packageId": "[variables('_solutionId')]", + "contentSchemaVersion": "3.0.0", + "contentId": "[variables('parserObject121').parserContentId121]", + "contentKind": "Parser", + "displayName": "Corelight SSL Aggregated Events", + "contentProductId": "[concat(take(variables('_solutionId'),50),'-','pr','-', uniqueString(concat(variables('_solutionId'),'-','Parser','-',variables('parserObject121').parserContentId121,'-', '1.1.0')))]", + "id": "[concat(take(variables('_solutionId'),50),'-','pr','-', 
uniqueString(concat(variables('_solutionId'),'-','Parser','-',variables('parserObject121').parserContentId121,'-', '1.1.0')))]", + "version": "[variables('parserObject121').parserVersion121]" + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/savedSearches", + "apiVersion": "2022-10-01", + "name": "[variables('parserObject121')._parserName121]", + "location": "[parameters('workspace-location')]", + "properties": { + "eTag": "*", + "displayName": "Corelight SSL Aggregated Events", + "category": "Microsoft Sentinel Parser", + "functionAlias": "corelight_ssl_agg", + "query": "let dummy_table = datatable(TimeGenerated: datetime, uid_s: string) [];\nlet corelight_ssl_agg = view () {\n union isfuzzy=true\n Corelight_v2_ssl_agg_CL,\n dummy_table\n | summarize arg_max(TimeGenerated, *) by uid_s\n | join kind=leftouter \n ( corelight_conn\n | project uid, local_orig, local_resp \n ) on $left.uid_s == $right.uid\n | project-away uid\n | extend\n path=column_ifexists(\"_path_s\", \"\"),\n system_name=column_ifexists(\"_system_name_s\", \"\"),\n uid=column_ifexists(\"uid_s\", \"\"),\n id_orig_h=column_ifexists(\"id_orig_h_s\", \"\"),\n id_orig_p=column_ifexists(\"id_orig_p_d\", real(null)),\n id_resp_h=column_ifexists(\"id_resp_h_s\", \"\"),\n id_resp_p=column_ifexists(\"id_resp_p_d\", real(null)),\n version=column_ifexists(\"version_s\", \"\"),\n cipher=column_ifexists(\"cipher_s\", \"\"),\n curve=column_ifexists(\"curve_s\", \"\"),\n established=column_ifexists(\"established_b\", \"\"),\n server_name=column_ifexists(\"server_name_s\", \"\"),\n next_protocol=column_ifexists(\"next_protocol_s\", \"\"),\n ssl_history=column_ifexists(\"ssl_history_s\", \"\"),\n cert_chain_fps=column_ifexists(\"cert_chain_fps_s\", \"\"),\n client_cert_chain_fps=column_ifexists(\"client_cert_chain_fps_s\", \"\"),\n validation_status=column_ifexists(\"validation_status_s\", \"\"),\n ja3=column_ifexists(\"ja3_s\", \"\"),\n ja3s=column_ifexists(\"ja3s_s\", \"\"),\n 
resumed=column_ifexists(\"resumed_b\", \"\"),\n sni_matches_cert=column_ifexists(\"sni_matches_cert_b\", \"\")\n | extend\n EventVendor=\"Corelight\",\n EventProduct=\"CorelightSensor\",\n EventType=\"ssl_agg\",\n ts = TimeGenerated,\n src=id_orig_h,\n src_ip=id_orig_h,\n src_port=id_orig_p,\n dest=id_resp_h,\n dest_ip=id_resp_h,\n dest_port=id_resp_p,\n ssl_cipher=cipher,\n ssl_curve=curve,\n ssl_subject_common_name=server_name,\n fingerprint=cert_chain_fps,\n is_self_signed = iff(validation_status==\"self signed certificate\", \"yes\", \"no\"),\n action = iff(established==\"true\",\"success\",\"failure\"),\n sensor_name = coalesce(system_name, \"unknown\"),\n signature=validation_status,\n ssl_version = version\n | extend\n is_broadcast = iff(src in(\"0.0.0.0\", \"255.255.255.255\") or dest in(\"255.255.255.255\", \"0.0.0.0\"),\"true\",\"false\"),\n is_src_internal_ip = iff(local_orig == true, \"true\", \"false\"),\n is_dest_internal_ip = iff(local_resp == true, \"true\", \"false\"),\n direction=case(local_orig==\"true\" and local_resp==\"true\", \"internal\", local_orig==\"true\" and local_resp==\"false\", \"outbound\", local_orig==\"false\" and local_resp==\"false\", \"external\", local_orig==\"false\" and local_resp==\"true\", \"inbound\", \"unknown\")\n | project\n path,\n system_name,\n uid,\n id_orig_h,\n id_orig_p,\n id_resp_h,\n id_resp_p,\n version,\n cipher,\n curve,\n established,\n server_name,\n next_protocol,\n ssl_history,\n cert_chain_fps,\n client_cert_chain_fps,\n validation_status,\n ja3,\n ja3s,\n resumed,\n sni_matches_cert,\n EventVendor,\n EventProduct,\n EventType,\n ts,\n src,\n src_ip,\n src_port,\n dest,\n dest_ip,\n dest_port,\n ssl_cipher,\n ssl_curve,\n ssl_subject_common_name,\n fingerprint,\n is_self_signed,\n action,\n sensor_name,\n signature,\n ssl_version,\n is_broadcast,\n is_src_internal_ip,\n is_dest_internal_ip,\n direction,\n TimeGenerated\n};\ncorelight_ssl_agg\n", + "functionParameters": "", + "version": 2, + "tags": [ + 
{ + "name": "description", + "value": "" + } + ] + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2022-01-01-preview", + "location": "[parameters('workspace-location')]", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Parser-', last(split(variables('parserObject121')._parserId121,'/'))))]", + "dependsOn": [ + "[variables('parserObject121')._parserId121]" + ], + "properties": { + "parentId": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_ssl_agg')]", + "contentId": "[variables('parserObject121').parserContentId121]", + "kind": "Parser", + "version": "[variables('parserObject121').parserVersion121]", + "source": { + "kind": "Solution", + "name": "Corelight", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Corelight", + "email": "[variables('_email')]" + }, + "support": { + "name": "Corelight", + "tier": "Partner", + "link": "https://support.corelight.com/" + } + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", + "apiVersion": "2023-04-01-preview", + "name": "[variables('parserObject122').parserTemplateSpecName122]", + "location": "[parameters('workspace-location')]", + "dependsOn": [ + "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" + ], + "properties": { + "description": "corelight_weird_agg Data Parser with template version 3.2.2", + "mainTemplate": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "[variables('parserObject122').parserVersion122]", + "parameters": {}, + "variables": {}, + "resources": [ + { + "name": "[variables('parserObject122')._parserName122]", + "apiVersion": "2022-10-01", + "type": 
"Microsoft.OperationalInsights/workspaces/savedSearches", + "location": "[parameters('workspace-location')]", + "properties": { + "eTag": "*", + "displayName": "Corelight WEIRD Aggregated Events", + "category": "Microsoft Sentinel Parser", + "functionAlias": "corelight_weird_agg", + "query": "let dummy_table = datatable(TimeGenerated: datetime, uid_s: string) [];\nlet corelight_weird_agg = view () {\n union isfuzzy=true\n Corelight_v2_weird_agg_CL,\n dummy_table\n | summarize arg_max(TimeGenerated, *) by uid_s\n | extend\n path=column_ifexists(\"_path_s\", \"\"),\n system_name=column_ifexists(\"_system_name_s\", \"\"),\n id_orig_h=column_ifexists(\"id_orig_h_s\", \"\"),\n id_orig_p=column_ifexists(\"id_orig_p_d\", real(null)),\n id_resp_h=column_ifexists(\"id_resp_h_s\", \"\"),\n id_resp_p=column_ifexists(\"id_resp_p_d\", real(null)),\n name=column_ifexists(\"name_s\", \"\"),\n uid=column_ifexists(\"uid_s\", \"\"),\n addl=column_ifexists(\"addl_s\", \"\"),\n notice=column_ifexists(\"notice_b\", \"\"),\n peer=column_ifexists(\"peer_s\", \"\"),\n source=column_ifexists(\"source_s\", \"\")\n | extend\n EventVendor=\"Corelight\",\n EventProduct=\"CorelightSensor\",\n EventType=\"weird_agg\",\n ts = TimeGenerated\n | project\n path,\n system_name,\n id_orig_h,\n id_orig_p,\n id_resp_h,\n id_resp_p,\n name,\n uid,\n addl,\n notice,\n peer,\n source,\n EventVendor,\n EventProduct,\n EventType,\n ts,\n TimeGenerated\n};\ncorelight_weird_agg\n", + "functionParameters": "", + "version": 2, + "tags": [ + { + "name": "description", + "value": "" + } + ] + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2022-01-01-preview", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Parser-', last(split(variables('parserObject122')._parserId122,'/'))))]", + "dependsOn": [ + "[variables('parserObject122')._parserId122]" + ], + "properties": { + "parentId": 
"[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_weird_agg')]", + "contentId": "[variables('parserObject122').parserContentId122]", + "kind": "Parser", + "version": "[variables('parserObject122').parserVersion122]", + "source": { + "name": "Corelight", + "kind": "Solution", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Corelight", + "email": "[variables('_email')]" + }, + "support": { + "name": "Corelight", + "tier": "Partner", + "link": "https://support.corelight.com/" + } + } + } + ] + }, + "packageKind": "Solution", + "packageVersion": "[variables('_solutionVersion')]", + "packageName": "[variables('_solutionName')]", + "packageId": "[variables('_solutionId')]", + "contentSchemaVersion": "3.0.0", + "contentId": "[variables('parserObject122').parserContentId122]", + "contentKind": "Parser", + "displayName": "Corelight WEIRD Aggregated Events", + "contentProductId": "[concat(take(variables('_solutionId'),50),'-','pr','-', uniqueString(concat(variables('_solutionId'),'-','Parser','-',variables('parserObject122').parserContentId122,'-', '1.1.0')))]", + "id": "[concat(take(variables('_solutionId'),50),'-','pr','-', uniqueString(concat(variables('_solutionId'),'-','Parser','-',variables('parserObject122').parserContentId122,'-', '1.1.0')))]", + "version": "[variables('parserObject122').parserVersion122]" + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/savedSearches", + "apiVersion": "2022-10-01", + "name": "[variables('parserObject122')._parserName122]", + "location": "[parameters('workspace-location')]", + "properties": { + "eTag": "*", + "displayName": "Corelight WEIRD Aggregated Events", + "category": "Microsoft Sentinel Parser", + "functionAlias": "corelight_weird_agg", + "query": "let dummy_table = datatable(TimeGenerated: datetime, uid_s: string) [];\nlet corelight_weird_agg = view () {\n union isfuzzy=true\n Corelight_v2_weird_agg_CL,\n dummy_table\n | 
summarize arg_max(TimeGenerated, *) by uid_s\n | extend\n path=column_ifexists(\"_path_s\", \"\"),\n system_name=column_ifexists(\"_system_name_s\", \"\"),\n id_orig_h=column_ifexists(\"id_orig_h_s\", \"\"),\n id_orig_p=column_ifexists(\"id_orig_p_d\", real(null)),\n id_resp_h=column_ifexists(\"id_resp_h_s\", \"\"),\n id_resp_p=column_ifexists(\"id_resp_p_d\", real(null)),\n name=column_ifexists(\"name_s\", \"\"),\n uid=column_ifexists(\"uid_s\", \"\"),\n addl=column_ifexists(\"addl_s\", \"\"),\n notice=column_ifexists(\"notice_b\", \"\"),\n peer=column_ifexists(\"peer_s\", \"\"),\n source=column_ifexists(\"source_s\", \"\")\n | extend\n EventVendor=\"Corelight\",\n EventProduct=\"CorelightSensor\",\n EventType=\"weird_agg\",\n ts = TimeGenerated\n | project\n path,\n system_name,\n id_orig_h,\n id_orig_p,\n id_resp_h,\n id_resp_p,\n name,\n uid,\n addl,\n notice,\n peer,\n source,\n EventVendor,\n EventProduct,\n EventType,\n ts,\n TimeGenerated\n};\ncorelight_weird_agg\n", + "functionParameters": "", + "version": 2, + "tags": [ + { + "name": "description", + "value": "" + } + ] + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2022-01-01-preview", + "location": "[parameters('workspace-location')]", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Parser-', last(split(variables('parserObject122')._parserId122,'/'))))]", + "dependsOn": [ + "[variables('parserObject122')._parserId122]" + ], + "properties": { + "parentId": "[resourceId('Microsoft.OperationalInsights/workspaces/savedSearches', parameters('workspace'), 'corelight_weird_agg')]", + "contentId": "[variables('parserObject122').parserContentId122]", + "kind": "Parser", + "version": "[variables('parserObject122').parserVersion122]", + "source": { + "kind": "Solution", + "name": "Corelight", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Corelight", + "email": "[variables('_email')]" + }, + 
"support": { + "name": "Corelight", + "tier": "Partner", + "link": "https://support.corelight.com/" + } + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", + "apiVersion": "2023-04-01-preview", + "name": "[variables('huntingQueryObject1').huntingQueryTemplateSpecName1]", + "location": "[parameters('workspace-location')]", + "dependsOn": [ + "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" + ], + "properties": { + "description": "CorelightAbnormalEmailSubject_HuntingQueries Hunting Query with template version 3.2.2", + "mainTemplate": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "[variables('huntingQueryObject1').huntingQueryVersion1]", + "parameters": {}, + "variables": {}, + "resources": [ + { + "type": "Microsoft.OperationalInsights/savedSearches", + "apiVersion": "2022-10-01", + "name": "Corelight_Hunting_Query_1", + "location": "[parameters('workspace-location')]", + "properties": { + "eTag": "*", + "displayName": "Corelight - Abnormal Email Subject", + "category": "Hunting Queries", + "query": "corelight_smtp\n| where subject hasprefix @'\\=?utf-16'\n", + "version": 2, + "tags": [ + { + "name": "description", + "value": "Query searches for emails with NON-Ascii characters within the Subject ." 
+ }, + { + "name": "tactics", + "value": "InitialAccess" + }, + { + "name": "techniques", + "value": "T1566" + } + ] + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2022-01-01-preview", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('HuntingQuery-', last(split(resourceId('Microsoft.OperationalInsights/savedSearches', variables('huntingQueryObject1')._huntingQuerycontentId1),'/'))))]", + "properties": { + "description": "Corelight Hunting Query 1", + "parentId": "[resourceId('Microsoft.OperationalInsights/savedSearches', variables('huntingQueryObject1')._huntingQuerycontentId1)]", + "contentId": "[variables('huntingQueryObject1')._huntingQuerycontentId1]", + "kind": "HuntingQuery", + "version": "[variables('huntingQueryObject1').huntingQueryVersion1]", + "source": { + "kind": "Solution", + "name": "Corelight", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Corelight", + "email": "[variables('_email')]" + }, + "support": { + "name": "Corelight", + "tier": "Partner", + "link": "https://support.corelight.com/" + } + } + } + ] + }, + "packageKind": "Solution", + "packageVersion": "[variables('_solutionVersion')]", + "packageName": "[variables('_solutionName')]", + "packageId": "[variables('_solutionId')]", + "contentSchemaVersion": "3.0.0", + "contentId": "[variables('huntingQueryObject1')._huntingQuerycontentId1]", + "contentKind": "HuntingQuery", + "displayName": "Corelight - Abnormal Email Subject", + "contentProductId": "[concat(take(variables('_solutionId'),50),'-','hq','-', uniqueString(concat(variables('_solutionId'),'-','HuntingQuery','-',variables('huntingQueryObject1')._huntingQuerycontentId1,'-', '1.0.0')))]", + "id": "[concat(take(variables('_solutionId'),50),'-','hq','-', uniqueString(concat(variables('_solutionId'),'-','HuntingQuery','-',variables('huntingQueryObject1')._huntingQuerycontentId1,'-', '1.0.0')))]", + "version": "1.0.0" + } + }, + 
{ + "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", + "apiVersion": "2023-04-01-preview", + "name": "[variables('huntingQueryObject2').huntingQueryTemplateSpecName2]", + "location": "[parameters('workspace-location')]", + "dependsOn": [ + "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" + ], + "properties": { + "description": "CorelightCompressedFilesTransferredOverHTTP_HuntingQueries Hunting Query with template version 3.2.2", + "mainTemplate": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "[variables('huntingQueryObject2').huntingQueryVersion2]", + "parameters": {}, + "variables": {}, + "resources": [ + { + "type": "Microsoft.OperationalInsights/savedSearches", + "apiVersion": "2022-10-01", + "name": "Corelight_Hunting_Query_2", + "location": "[parameters('workspace-location')]", + "properties": { + "eTag": "*", + "displayName": "Corelight - Compressed Files Transferred over HTTP", + "category": "Hunting Queries", + "query": "corelight_http\n| where TimeGenerated > ago(24h)\n| where method in~ ('POST', 'PUT')\n| where orig_mime_types has_any ('application/vnd.ms-cab-compressed', 'application/warc', 'application/x-7z-compressed', 'application/x-ace', 'application/x-arc', 'application/x-archive', 'application/x-arj', 'application/x-compress', 'application/x-cpio', 'application/x-dmg', 'application/x-eet', 'application/x-gzip', 'application/x-lha', 'application/x-lrzip', 'application/x-lz4', 'application/x-lzma', 'application/x-lzh', 'application/x-lzip', 'application/x-rar', 'application/x-rpm', 'application/x-stuffit', 'application/x-tar', 'application/x-xz', 'application/x-zoo', 'application/zip')\n| summarize count() by id_orig_h\n| top 10 by count_\n", + "version": 2, + "tags": [ + { + "name": "description", + "value": "Query searches 
for top sources which transferred compressed archives over HTTP." + }, + { + "name": "tactics", + "value": "Exfiltration" + }, + { + "name": "techniques", + "value": "T1560" + } + ] + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2022-01-01-preview", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('HuntingQuery-', last(split(resourceId('Microsoft.OperationalInsights/savedSearches', variables('huntingQueryObject2')._huntingQuerycontentId2),'/'))))]", + "properties": { + "description": "Corelight Hunting Query 2", + "parentId": "[resourceId('Microsoft.OperationalInsights/savedSearches', variables('huntingQueryObject2')._huntingQuerycontentId2)]", + "contentId": "[variables('huntingQueryObject2')._huntingQuerycontentId2]", + "kind": "HuntingQuery", + "version": "[variables('huntingQueryObject2').huntingQueryVersion2]", + "source": { + "kind": "Solution", + "name": "Corelight", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Corelight", + "email": "[variables('_email')]" + }, + "support": { + "name": "Corelight", + "tier": "Partner", + "link": "https://support.corelight.com/" + } + } + } + ] + }, + "packageKind": "Solution", + "packageVersion": "[variables('_solutionVersion')]", + "packageName": "[variables('_solutionName')]", + "packageId": "[variables('_solutionId')]", + "contentSchemaVersion": "3.0.0", + "contentId": "[variables('huntingQueryObject2')._huntingQuerycontentId2]", + "contentKind": "HuntingQuery", + "displayName": "Corelight - Compressed Files Transferred over HTTP", + "contentProductId": "[concat(take(variables('_solutionId'),50),'-','hq','-', uniqueString(concat(variables('_solutionId'),'-','HuntingQuery','-',variables('huntingQueryObject2')._huntingQuerycontentId2,'-', '1.0.0')))]", + "id": "[concat(take(variables('_solutionId'),50),'-','hq','-', 
uniqueString(concat(variables('_solutionId'),'-','HuntingQuery','-',variables('huntingQueryObject2')._huntingQuerycontentId2,'-', '1.0.0')))]", + "version": "1.0.0" + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", + "apiVersion": "2023-04-01-preview", + "name": "[variables('huntingQueryObject3').huntingQueryTemplateSpecName3]", + "location": "[parameters('workspace-location')]", + "dependsOn": [ + "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" + ], + "properties": { + "description": "CorelightDataTransferedByIp_HuntingQueries Hunting Query with template version 3.2.2", + "mainTemplate": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "[variables('huntingQueryObject3').huntingQueryVersion3]", + "parameters": {}, + "variables": {}, + "resources": [ + { + "type": "Microsoft.OperationalInsights/savedSearches", + "apiVersion": "2022-10-01", + "name": "Corelight_Hunting_Query_3", + "location": "[parameters('workspace-location')]", + "properties": { + "eTag": "*", + "displayName": "Corelight - Top sources of data transferred", + "category": "Hunting Queries", + "query": "corelight_http\n| where TimeGenerated > ago(24h)\n| where method in~ ('POST', 'PUT')\n| summarize data_sent = sum(tolong(request_body_len)) by id_orig_h\n| top 10 by data_sent\n", + "version": 2, + "tags": [ + { + "name": "description", + "value": "Query searches for top sources by transferred data over period of time." 
+ }, + { + "name": "tactics", + "value": "Exfiltration" + }, + { + "name": "techniques", + "value": "T1560" + } + ] + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2022-01-01-preview", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('HuntingQuery-', last(split(resourceId('Microsoft.OperationalInsights/savedSearches', variables('huntingQueryObject3')._huntingQuerycontentId3),'/'))))]", + "properties": { + "description": "Corelight Hunting Query 3", + "parentId": "[resourceId('Microsoft.OperationalInsights/savedSearches', variables('huntingQueryObject3')._huntingQuerycontentId3)]", + "contentId": "[variables('huntingQueryObject3')._huntingQuerycontentId3]", + "kind": "HuntingQuery", + "version": "[variables('huntingQueryObject3').huntingQueryVersion3]", + "source": { + "kind": "Solution", + "name": "Corelight", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Corelight", + "email": "[variables('_email')]" + }, + "support": { + "name": "Corelight", + "tier": "Partner", + "link": "https://support.corelight.com/" + } + } + } + ] + }, + "packageKind": "Solution", + "packageVersion": "[variables('_solutionVersion')]", + "packageName": "[variables('_solutionName')]", + "packageId": "[variables('_solutionId')]", + "contentSchemaVersion": "3.0.0", + "contentId": "[variables('huntingQueryObject3')._huntingQuerycontentId3]", + "contentKind": "HuntingQuery", + "displayName": "Corelight - Top sources of data transferred", + "contentProductId": "[concat(take(variables('_solutionId'),50),'-','hq','-', uniqueString(concat(variables('_solutionId'),'-','HuntingQuery','-',variables('huntingQueryObject3')._huntingQuerycontentId3,'-', '1.0.0')))]", + "id": "[concat(take(variables('_solutionId'),50),'-','hq','-', uniqueString(concat(variables('_solutionId'),'-','HuntingQuery','-',variables('huntingQueryObject3')._huntingQuerycontentId3,'-', '1.0.0')))]", + "version": "1.0.0" + 
} + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", + "apiVersion": "2023-04-01-preview", + "name": "[variables('huntingQueryObject4').huntingQueryTemplateSpecName4]", + "location": "[parameters('workspace-location')]", + "dependsOn": [ + "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" + ], + "properties": { + "description": "CorelightExternalServices_HuntingQueries Hunting Query with template version 3.2.2", + "mainTemplate": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "[variables('huntingQueryObject4').huntingQueryVersion4]", + "parameters": {}, + "variables": {}, + "resources": [ + { + "type": "Microsoft.OperationalInsights/savedSearches", + "apiVersion": "2022-10-01", + "name": "Corelight_Hunting_Query_4", + "location": "[parameters('workspace-location')]", "properties": { "eTag": "*", "displayName": "Corelight - External Facing Services", @@ -19478,7 +20300,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightFilesSeen_HuntingQueries Hunting Query with template version 3.2.1", + "description": "CorelightFilesSeen_HuntingQueries Hunting Query with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject5').huntingQueryVersion5]", @@ -19562,7 +20384,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightFilesTransferedByIp_HuntingQueries Hunting Query 
with template version 3.2.1", + "description": "CorelightFilesTransferedByIp_HuntingQueries Hunting Query with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject6').huntingQueryVersion6]", @@ -19646,7 +20468,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightMultipleRemoteSMBConnectionsFromSingleIP_HuntingQueries Hunting Query with template version 3.2.1", + "description": "CorelightMultipleRemoteSMBConnectionsFromSingleIP_HuntingQueries Hunting Query with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject7').huntingQueryVersion7]", @@ -19730,7 +20552,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightObfuscatedBinary_HuntingQueries Hunting Query with template version 3.2.1", + "description": "CorelightObfuscatedBinary_HuntingQueries Hunting Query with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject8').huntingQueryVersion8]", @@ -19814,7 +20636,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightRarePOST_HuntingQueries Hunting Query with template version 3.2.1", + "description": "CorelightRarePOST_HuntingQueries Hunting Query with template 
version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject9').huntingQueryVersion9]", @@ -19898,7 +20720,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CorelightRepetitiveDnsFailures_HuntingQueries Hunting Query with template version 3.2.1", + "description": "CorelightRepetitiveDnsFailures_HuntingQueries Hunting Query with template version 3.2.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject10').huntingQueryVersion10]", @@ -19978,12 +20800,12 @@ "apiVersion": "2023-04-01-preview", "location": "[parameters('workspace-location')]", "properties": { - "version": "3.2.1", + "version": "3.2.2", "kind": "Solution", "contentSchemaVersion": "3.0.0", "displayName": "Corelight", "publisherDisplayName": "Corelight", - "descriptionHtml": "

Note: Please refer to the following before installing the solution:

\n

• Review the solution Release Notes

\n

• There may be known issues pertaining to this Solution, please refer to them before installing.

\n

The Corelight solution provides the capability to ingest events from Zeek and Suricata via Corelight Sensors into Microsoft Sentinel.

\n

Underlying Microsoft Technologies used:

\n

This solution takes a dependency on the following technologies, and some of these dependencies either may be in Preview state or might result in additional ingestion or operational costs:

\n
    \n
  1. Agent based logs collection from Windows and Linux machines
  2. \n
\n

Data Connectors: 1, Parsers: 116, Workbooks: 5, Analytic Rules: 10, Hunting Queries: 10, Watchlists: 4

\n

Learn more about Microsoft Sentinel | Learn more about Solutions

\n", + "descriptionHtml": "

Note: Please refer to the following before installing the solution:

\n

• Review the solution Release Notes

\n

• There may be known issues pertaining to this Solution, please refer to them before installing.

\n

The Corelight solution provides the capability to ingest events from Zeek and Suricata via Corelight Sensors into Microsoft Sentinel.

\n

Underlying Microsoft Technologies used:

\n

This solution takes a dependency on the following technologies, and some of these dependencies either may be in Preview state or might result in additional ingestion or operational costs:

\n
    \n
  1. Agent based logs collection from Windows and Linux machines
  2. \n
\n

Data Connectors: 1, Parsers: 122, Workbooks: 5, Analytic Rules: 10, Hunting Queries: 10, Watchlists: 4

\n

Learn more about Microsoft Sentinel | Learn more about Solutions

\n", "contentKind": "Solution", "contentProductId": "[variables('_solutioncontentProductId')]", "id": "[variables('_solutioncontentProductId')]", @@ -20035,22 +20857,22 @@ { "kind": "Watchlist", "contentId": "[variables('_CorelightAggregationsEnrichment1')]", - "version": "3.2.1" + "version": "3.2.2" }, { "kind": "Watchlist", "contentId": "[variables('_CorelightAggregationsEnrichment2')]", - "version": "3.2.1" + "version": "3.2.2" }, { "kind": "Watchlist", "contentId": "[variables('_CorelightDNSPortDesc')]", - "version": "3.2.1" + "version": "3.2.2" }, { "kind": "Watchlist", "contentId": "[variables('_CorelightInferencesDesc')]", - "version": "3.2.1" + "version": "3.2.2" }, { "kind": "AnalyticsRule", @@ -20687,6 +21509,36 @@ "contentId": "[variables('parserObject116').parserContentId116]", "version": "[variables('parserObject116').parserVersion116]" }, + { + "kind": "Parser", + "contentId": "[variables('parserObject117').parserContentId117]", + "version": "[variables('parserObject117').parserVersion117]" + }, + { + "kind": "Parser", + "contentId": "[variables('parserObject118').parserContentId118]", + "version": "[variables('parserObject118').parserVersion118]" + }, + { + "kind": "Parser", + "contentId": "[variables('parserObject119').parserContentId119]", + "version": "[variables('parserObject119').parserVersion119]" + }, + { + "kind": "Parser", + "contentId": "[variables('parserObject120').parserContentId120]", + "version": "[variables('parserObject120').parserVersion120]" + }, + { + "kind": "Parser", + "contentId": "[variables('parserObject121').parserContentId121]", + "version": "[variables('parserObject121').parserVersion121]" + }, + { + "kind": "Parser", + "contentId": "[variables('parserObject122').parserContentId122]", + "version": "[variables('parserObject122').parserVersion122]" + }, { "kind": "HuntingQuery", "contentId": "[variables('huntingQueryObject1')._huntingQuerycontentId1]", diff --git a/Solutions/Corelight/Parsers/corelight_conn_agg.yaml 
b/Solutions/Corelight/Parsers/corelight_conn_agg.yaml new file mode 100644 index 00000000000..06845b386ba --- /dev/null +++ b/Solutions/Corelight/Parsers/corelight_conn_agg.yaml @@ -0,0 +1,236 @@ +id: 13f9b742-0060-4920-92fa-37942f2b157f +Function: + Title: Corelight Connection Aggregated Events + Version: '1.1.0' + LastUpdated: '2025-11-26' +Category: Microsoft Sentinel Parser +FunctionName: corelight_conn_agg +FunctionAlias: corelight_conn_agg +FunctionQuery: | + let ConnStateLookup = datatable( + conn_state: string, + conn_state_desc: string, + action: string + )[ + "S0","Connection attempt seen, no reply.","teardown", + "S1","Connection established, not terminated.","allowed", + "SF","Normal establishment and termination.","allowed", + "REJ","Connection attempt rejected.","blocked", + "S2","Connection established and close attempt by originator seen (but no reply from responder).","allowed", + "S3","Connection established and close attempt by responder seen (but no reply from originator).","allowed", + "RSTO","Connection established, originator aborted (sent a RST).","allowed", + "RSTR","Established, responder aborted.","allowed", + "RSTOS0","Originator sent a SYN followed by a RST, we never saw a SYN-ACK from the responder.","teardown", + "RSTRH","Responder sent a SYN ACK followed by a RST, we never saw a SYN from the (purported) originator.","teardown", + "SH","Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open).","teardown", + "SHR","Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator.","teardown", + "OTH","No SYN seen, just midstream traffic (a 'partial connection' that was not later closed).","allowed" + ]; + let dummy_table = datatable(TimeGenerated: datetime, uid_s: string) []; + let corelight_conn_agg = view () { + union isfuzzy=true + Corelight_v2_conn_agg_CL, + dummy_table + | summarize arg_max(TimeGenerated, *) by uid_s + | extend + 
path=column_ifexists("_path_s", ""), + system_name=column_ifexists("_system_name_s", ""), + id_orig_h=column_ifexists("id_orig_h_s", ""), + id_orig_p=column_ifexists("id_orig_p_d", real(null)), + id_resp_h=column_ifexists("id_resp_h_s", ""), + id_resp_p=column_ifexists("id_resp_p_d", real(null)), + proto=column_ifexists("proto_s", ""), + suri_ids=column_ifexists("suri_ids_s", ""), + local_orig=column_ifexists("local_orig_b", ""), + local_resp=column_ifexists("local_resp_b", ""), + id_orig_h_n=column_ifexists("id_orig_h_n_s", ""), + id_resp_h_n=column_ifexists("id_resp_h_n_s", ""), + write_ts=column_ifexists("_write_ts_t", datetime(null)), + uid=column_ifexists("uid_s", ""), + community_id=column_ifexists("community_id_s",""), + spcap_url=column_ifexists("spcap_url_s", ""), + service=column_ifexists("service_s", ""), + apps=column_ifexists("app_s", ""), + corelight_shunted=column_ifexists("corelight_shunted_b", ""), + duration=column_ifexists("duration_d", real(null)), + orig_bytes=column_ifexists("orig_bytes_d", real(null)), + resp_bytes=column_ifexists("resp_bytes_d", real(null)), + missed_bytes=column_ifexists("missed_bytes_d", real(null)), + orig_shunted_pkts=column_ifexists("orig_shunted_pkts_d", real(null)), + orig_shunted_bytes=column_ifexists("orig_shunted_bytes_d", real(null)), + resp_shunted_pkts=column_ifexists("resp_shunted_pkts_d", real(null)), + resp_shunted_bytes=column_ifexists("resp_shunted_bytes_d", real(null)), + orig_pkts=column_ifexists("orig_pkts_d", real(null)), + orig_ip_bytes=column_ifexists("orig_ip_bytes_d", real(null)), + resp_pkts=column_ifexists("resp_pkts_d", real(null)), + resp_ip_bytes=column_ifexists("resp_ip_bytes_d", real(null)), + conn_state=column_ifexists("conn_state_s", ""), + history=column_ifexists("history_s", ""), + tunnel_parents=column_ifexists("tunnel_parents_s", ""), + netskope_site_id=column_ifexists("netskope_site_id_s", ""), + netskope_user_id=column_ifexists("netskope_user_id_s", ""), + 
id_vlan=column_ifexists("id_vlan_d", real(null)), + vlan=column_ifexists("vlan_d", real(null)), + inner_vlan=column_ifexists("inner_vlan_d", real(null)), + orig_inst_org_id=column_ifexists("orig_inst_org_id_s", ""), + orig_inst_name=column_ifexists("orig_inst_name_s", ""), + orig_inst_az=column_ifexists("orig_inst_az_s", ""), + orig_inst_vpc_id=column_ifexists("orig_inst_vpc_id_s", ""), + orig_inst_subnet_id=column_ifexists("orig_inst_subnet_id_s", ""), + orig_inst_sg_ids=column_ifexists("orig_inst_sg_ids_s", ""), + orig_inst_project=column_ifexists("orig_inst_project_s", ""), + orig_inst_network=column_ifexists("orig_inst_network_s", ""), + orig_inst_network_tags=column_ifexists("orig_inst_network_tags_s", ""), + orig_inst_id=column_ifexists("orig_inst_id_s", ""), + orig_inst_resource_group=column_ifexists("orig_inst_resource_group_s", ""), + orig_inst_subscription=column_ifexists("orig_inst_subscription_s", ""), + orig_inst_os=column_ifexists("orig_inst_os_s", ""), + orig_inst_location=column_ifexists("orig_inst_location_s", ""), + orig_inst_nsg=column_ifexists("orig_inst_nsg_s", ""), + resp_inst_org_id=column_ifexists("resp_inst_org_id_s", ""), + resp_inst_name=column_ifexists("resp_inst_name_s", ""), + resp_inst_az=column_ifexists("resp_inst_az_s", ""), + resp_inst_vpc_id=column_ifexists("resp_inst_vpc_id_s", ""), + resp_inst_subnet_id=column_ifexists("resp_inst_subnet_id_s", ""), + resp_inst_sg_ids=column_ifexists("resp_inst_sg_ids_s", ""), + resp_inst_project=column_ifexists("resp_inst_project_s", ""), + resp_inst_network=column_ifexists("resp_inst_network_s", ""), + resp_inst_network_tags=column_ifexists("resp_inst_network_tags_s", ""), + resp_inst_id=column_ifexists("resp_inst_id_s", ""), + resp_inst_resource_group=column_ifexists("resp_inst_resource_group_s", ""), + resp_inst_subscription=column_ifexists("resp_inst_subscription_s", ""), + resp_inst_os=column_ifexists("resp_inst_os_s", ""), + resp_inst_location=column_ifexists("resp_inst_location_s", ""), + 
resp_inst_nsg=column_ifexists("resp_inst_nsg_s", "") + | lookup ConnStateLookup on conn_state + | extend + EventVendor = "Corelight", + EventProduct = "CorelightSensor", + EventType = "conn_agg", + ts = TimeGenerated, + src=id_orig_h, + src_ip=id_orig_h, + src_port=id_orig_p, + dest=id_resp_h, + dest_ip=id_resp_h, + dest_port=id_resp_p, + bytes_out=orig_ip_bytes, + packets_out=orig_pkts, + bytes_in=resp_ip_bytes, + packets_in=resp_pkts, + session_id=uid, + bytes=resp_ip_bytes + orig_ip_bytes, + sensor_name = coalesce(system_name, "unknown"), + transport=iff(proto=='icmp' and id_orig_h matches regex ".*:.*", "icmp6", proto), + app=split(service, ",") + | extend + is_dest_internal_ip = iff(local_resp == true, "true", "false"), + is_src_internal_ip = iff(local_orig == true, "true", "false"), + direction = case( + local_orig == "true" and local_resp == "true", + "internal", + local_orig == "true" and local_resp == "false", + "outbound", + local_orig == "false" and local_resp == "false", + "external", + local_orig == "false" and local_resp == "true", + "inbound", + "unknown" + ) + | project + TimeGenerated, + path, + system_name, + id_orig_h, + id_orig_p, + id_resp_h, + id_resp_p, + proto, + suri_ids, + local_orig, + local_resp, + id_orig_h_n, + id_resp_h_n, + write_ts, + uid, + community_id, + spcap_url, + service, + app, + apps, + corelight_shunted, + duration, + orig_bytes, + resp_bytes, + missed_bytes, + orig_shunted_pkts, + orig_shunted_bytes, + resp_shunted_pkts, + resp_shunted_bytes, + orig_pkts, + orig_ip_bytes, + resp_pkts, + resp_ip_bytes, + conn_state, + history, + tunnel_parents, + netskope_site_id, + netskope_user_id, + id_vlan, + vlan, + inner_vlan, + orig_inst_org_id, + orig_inst_name, + orig_inst_az, + orig_inst_vpc_id, + orig_inst_subnet_id, + orig_inst_sg_ids, + orig_inst_project, + orig_inst_network, + orig_inst_network_tags, + orig_inst_id, + orig_inst_resource_group, + orig_inst_subscription, + orig_inst_os, + orig_inst_location, + orig_inst_nsg, + 
resp_inst_org_id, + resp_inst_name, + resp_inst_az, + resp_inst_vpc_id, + resp_inst_subnet_id, + resp_inst_sg_ids, + resp_inst_project, + resp_inst_network, + resp_inst_network_tags, + resp_inst_id, + resp_inst_resource_group, + resp_inst_subscription, + resp_inst_os, + resp_inst_location, + resp_inst_nsg, + EventVendor, + EventProduct, + EventType, + ts, + is_dest_internal_ip, + is_src_internal_ip, + direction, + conn_state_desc, + action, + src, + src_ip, + src_port, + dest, + dest_ip, + dest_port, + bytes_out, + packets_out, + bytes_in, + packets_in, + session_id, + bytes, + sensor_name, + transport + }; + corelight_conn_agg \ No newline at end of file diff --git a/Solutions/Corelight/Parsers/corelight_dns_agg.yaml b/Solutions/Corelight/Parsers/corelight_dns_agg.yaml new file mode 100644 index 00000000000..d2dc1198ec3 --- /dev/null +++ b/Solutions/Corelight/Parsers/corelight_dns_agg.yaml @@ -0,0 +1,165 @@ +id: 69368767-52c7-4431-bc0c-80d7df14376b +Function: + Title: Corelight DNS Aggregated Events + Version: '1.1.0' + LastUpdated: '2025-11-26' +Category: Microsoft Sentinel Parser +FunctionName: corelight_dns_agg +FunctionAlias: corelight_dns_agg +FunctionQuery: | + let DNSLookup = datatable( + rcode: int, + reply_code: string, + cim_reply_code: string + )[ + 0,"NOERROR","No Error", + 1,"FORMERR","FormErr", + 2,"SERVFAIL","ServFail", + 3,"NXDOMAIN","NXDomain", + 4,"NOTIMP","NotImp", + 5,"REFUSED","Refused" + ]; + let dummy_table = datatable(TimeGenerated: datetime, uid_s: string) []; + let corelight_dns_agg = view () { + union isfuzzy=true + Corelight_v2_dns_agg_CL, + dummy_table + | summarize arg_max(TimeGenerated, *) by uid_s + | join kind=leftouter + ( corelight_conn + | project uid, local_orig, local_resp + ) on $left.uid_s == $right.uid + | project-away uid + | extend + path=column_ifexists("_path_s", ""), + system_name=column_ifexists("_system_name_s", ""), + id_orig_h=column_ifexists("id_orig_h_s", ""), + id_orig_p=column_ifexists("id_orig_p_d", 
real(null)), + id_resp_h=column_ifexists("id_resp_h_s", ""), + id_resp_p=column_ifexists("id_resp_p_d", real(null)), + query = column_ifexists("query_s", ""), + qtype = column_ifexists("qtype_d", real(null)), + answers = column_ifexists("answers_s", ""), + rcode = column_ifexists("rcode_d", long(null)), + uid=column_ifexists("uid_s", ""), + qtype_name = column_ifexists("qtype_name_s", ""), + rcode_name = column_ifexists("rcode_name_s", ""), + rejected = column_ifexists("rejected_b", ""), + proto=column_ifexists("proto_s", ""), + trans_id = column_ifexists("trans_id_d", ""), + rtt = column_ifexists("rtt_d", real(null)), + qclass = column_ifexists("qclass_d", real(null)), + qclass_name = column_ifexists("qclass_name_s", ""), + AA = column_ifexists("AA_b", ""), + RA = column_ifexists("RA_b", ""), + RD = column_ifexists("RD_b", ""), + TC = column_ifexists("TC_b", ""), + TTLs = column_ifexists("TTLs_s", ""), + Z = column_ifexists("Z_d", real(null)), + icann_domain = column_ifexists("icann_domain_s", ""), + icann_host_subdomain = column_ifexists("icann_host_subdomain_s", ""), + icann_tld = column_ifexists("icann_tld_s", ""), + is_trusted_domain = column_ifexists("is_trusted_domain_b", "") + | extend rcode = toint(rcode) + | lookup DNSLookup on rcode + | extend + EventVendor = "Corelight", + EventProduct = "CorelightSensor", + EventType = "dns_agg", + ts = TimeGenerated, + dns_flags_authoritative_answer = AA, + dns_flags_recursion_available = RA, + dns_flags_truncated_response = TC, + ttl = TTLs, + src = id_orig_h, + src_ip = id_orig_h, + src_port = id_orig_p, + dest = id_resp_h, + dest_ip = id_resp_h, + dest_port = id_resp_p, + record_class = qclass_name, + record_type = qtype_name, + reply_code_id = rcode, + dns_flags_rejected = rejected, + duration = rtt, + response_time = rtt, + transaction_id = trans_id, + session_id = uid, + answer_count = array_length(todynamic(answers)), + query_count = array_length(todynamic(query)), + sensor_name = coalesce(system_name, 
"unknown"), + reply_code = cim_reply_code + | extend + is_broadcast =iff(src in("0.0.0.0", "255.255.255.255") or dest in("255.255.255.255", "0.0.0.0"),"true","false"), + is_dest_internal_ip = iff(local_resp == true, "true", "false"), + is_src_internal_ip = iff(local_orig == true, "true", "false"), + transport = iff(proto == "icmp" and id_orig_h matches regex ".*:.*", "icmp6", proto), + query_length = strlen(query), + answer_length = iff(answer_count == 1, strlen(answers), tolong('')), + message_type = iff(isnotnull(rcode), "Response", "Query") + | project + TimeGenerated, + path, + system_name, + id_orig_h, + id_orig_p, + id_resp_h, + id_resp_p, + query, + qtype, + answers, + answer_count, + rcode, + uid, + qtype_name, + rcode_name, + rejected, + proto, + trans_id, + rtt, + qclass, + qclass_name, + AA, + RA, + RD, + TC, + TTLs, + Z, + icann_domain, + icann_host_subdomain, + icann_tld, + is_trusted_domain, + dns_flags_authoritative_answer, + dns_flags_recursion_available, + dns_flags_truncated_response, + ttl, + src, + src_ip, + src_port, + dest, + dest_ip, + dest_port, + record_class, + record_type, + reply_code_id, + dns_flags_rejected, + duration, + response_time, + transaction_id, + session_id, + query_count, + sensor_name, + reply_code, + is_broadcast, + is_dest_internal_ip, + is_src_internal_ip, + transport, + query_length, + answer_length, + message_type, + EventVendor, + EventProduct, + EventType, + ts + }; + corelight_dns_agg \ No newline at end of file diff --git a/Solutions/Corelight/Parsers/corelight_files_agg.yaml b/Solutions/Corelight/Parsers/corelight_files_agg.yaml new file mode 100644 index 00000000000..1701f665075 --- /dev/null +++ b/Solutions/Corelight/Parsers/corelight_files_agg.yaml @@ -0,0 +1,114 @@ +id: db9b0306-2100-4bb3-b731-50ae5c8b8fea +Function: + Title: Corelight Files Aggregated Events + Version: '1.1.0' + LastUpdated: '2025-11-26' +Category: Microsoft Sentinel Parser +FunctionName: corelight_files_agg +FunctionAlias: 
corelight_files_agg +FunctionQuery: | + let dummy_table = datatable(TimeGenerated: datetime, uid_s: string) []; + let corelight_files_agg = view () { + union isfuzzy=true + Corelight_v2_files_agg_CL, + dummy_table + | summarize arg_max(TimeGenerated, *) by uid_s + | join kind=leftouter + ( corelight_conn + | project uid, local_orig, local_resp + ) on $left.uid_s == $right.uid + | project-away uid + | extend + path=column_ifexists("_path_s", ""), + system_name=column_ifexists("_system_name_s", ""), + id_orig_h=column_ifexists("id_orig_h_s", ""), + id_orig_p=column_ifexists("id_orig_p_d", real(null)), + id_resp_h=column_ifexists("id_resp_h_s", ""), + id_resp_p=column_ifexists("id_resp_p_d", real(null)), + source = column_ifexists("source_s", ""), + analyzers = column_ifexists("analyzers_s", ""), + filename = column_ifexists("filename_s", ""), + md5 = column_ifexists("md5_s", ""), + is_orig = column_ifexists("is_orig_b", ""), + local_orig = column_ifexists("local_orig_b", ""), + sha1 = column_ifexists("sha1_s", ""), + sha256 = column_ifexists("sha256_s", ""), + fuid = column_ifexists("fuid_s", ""), + uid=column_ifexists("uid_s", ""), + parent_fuid = column_ifexists("parent_fuid_s", ""), + mime_type = column_ifexists("mime_type_s", ""), + duration = column_ifexists("duration_d", real(null)), + seen_bytes = column_ifexists("seen_bytes_d", real(null)), + total_bytes = column_ifexists("total_bytes_d", real(null)), + missing_bytes = column_ifexists("missing_bytes_d", real(null)), + overflow_bytes = column_ifexists("overflow_bytes_d", real(null)), + timedout = column_ifexists("timedout_b", ""), + depth = column_ifexists("depth_d", real(null)) + | extend + EventVendor = "Corelight", + EventProduct = "CorelightSensor", + EventType = "files_agg", + ts = TimeGenerated, + bytes = seen_bytes, + file_size = total_bytes, + file_name = filename, + object = filename, + src = id_orig_h, + src_ip = id_orig_h, + src_port = id_orig_p, + dest = id_resp_h, + dest_ip = id_resp_h, + 
dest_port = id_resp_p, + app = source, + file_hash = coalesce(md5, sha1, sha256, "unknown"), + sensor_name = coalesce(system_name, "unknown") + | extend + is_broadcast =iff(src in("0.0.0.0", "255.255.255.255") or dest in("255.255.255.255", "0.0.0.0"),"true","false"), + is_dest_internal_ip = iff(local_resp == true, "true", "false"), + is_src_internal_ip = iff(local_orig == true, "true", "false") + | project + TimeGenerated, + path, + system_name, + id_orig_h, + id_orig_p, + id_resp_h, + id_resp_p, + source, + analyzers, + filename, + md5, + is_orig, + local_orig, + sha1, + sha256, + fuid, + uid, + parent_fuid, + mime_type, + duration, + seen_bytes, + total_bytes, + missing_bytes, + overflow_bytes, + timedout, + depth, + bytes, + file_size, + file_name, + object, + src, + src_ip, + src_port, + dest, + dest_ip, + dest_port, + app, + file_hash, + sensor_name, + EventVendor, + EventProduct, + EventType, + ts + }; + corelight_files_agg \ No newline at end of file diff --git a/Solutions/Corelight/Parsers/corelight_http_agg.yaml b/Solutions/Corelight/Parsers/corelight_http_agg.yaml new file mode 100644 index 00000000000..8881ff49161 --- /dev/null +++ b/Solutions/Corelight/Parsers/corelight_http_agg.yaml @@ -0,0 +1,186 @@ +id: 204fc13c-53f1-42f8-a101-d68e653f216f +Function: + Title: Corelight HTTP Aggregated Events + Version: '1.1.0' + LastUpdated: '2025-11-26' +Category: Microsoft Sentinel Parser +FunctionName: corelight_http_agg +FunctionAlias: corelight_http_agg +FunctionQuery: | + let StatusLookup = datatable( + status: string, + action: string + )[ + "success","allowed", + "failure","blocked", + "200","success", + "204","success", + "206","success", + "207","success", + "301","success", + "302","success", + "303","success", + "304","success", + "307","success", + "400","failure", + "401","failure", + "403","failure", + "404","failure", + "408","failure", + "500","failure", + "503","failure", + "504","failure" + ]; + let dummy_table = datatable(TimeGenerated: datetime, 
uid_s: string) []; + let corelight_http_agg = view () { + union isfuzzy=true + Corelight_v2_http_agg_CL, + dummy_table + | summarize arg_max(TimeGenerated, *) by uid_s + | join kind=leftouter + ( corelight_conn + | project uid, local_orig, local_resp + ) on $left.uid_s == $right.uid + | project-away uid + | extend + path=column_ifexists("_path_s", ""), + system_name=column_ifexists("_system_name_s", ""), + id_orig_h=column_ifexists("id_orig_h_s", ""), + id_orig_p=column_ifexists("id_orig_p_d", real(null)), + id_resp_h=column_ifexists("id_resp_h_s", ""), + id_resp_p=column_ifexists("id_resp_p_d", real(null)), + method = column_ifexists("method_s", ""), + host = column_ifexists("host_s", ""), + uid = column_ifexists("uid_s", ""), + uri = column_ifexists("uri_s", ""), + referrer = column_ifexists("referrer_s", ""), + status_code = column_ifexists("status_code_d", real(null)), + status_msg = column_ifexists("status_msg_s", ""), + request_body_len = column_ifexists("request_body_len_d", real(null)), + response_body_len = column_ifexists("response_body_len_d", real(null)), + tags = column_ifexists("tags_s", ""), + orig_mime_types = column_ifexists("orig_mime_types_s", ""), + resp_mime_types = column_ifexists("resp_mime_types_s", ""), + post_body = column_ifexists("post_body_s", ""), + orig_fuids = column_ifexists("orig_fuids_s", ""), + orig_filenames = column_ifexists("orig_filenames_s", ""), + resp_fuids = column_ifexists("resp_fuids_s", ""), + resp_filenames = column_ifexists("resp_filenames_s", ""), + version = column_ifexists("version_s", ""), + user_agent = column_ifexists("user_agent_s", ""), + username = column_ifexists("username_s", ""), + pwd = column_ifexists("password_s", ""), + proxied = column_ifexists("proxied_s", ""), + origin = column_ifexists("origin_s", ""), + info_code = column_ifexists("info_code_d", real(null)), + info_msg = column_ifexists("info_msg_s", "") + | extend status_code = tostring(toint(status_code)) + | lookup StatusLookup on 
$left.status_code == $right.status + | extend + EventVendor = "Corelight", + EventProduct = "CorelightSensor", + EventType = "http_agg", + ts = TimeGenerated, + dest_host = host, + src = id_orig_h, + src_ip = id_orig_h, + src_port = id_orig_p, + dest = id_resp_h, + dest_ip = id_resp_h, + dest_port = id_resp_p, + http_method = method, + bytes_in = request_body_len, + bytes_out = response_body_len, + status = status_code, + vendor_action = status_msg, + uri_path = uri, + object = resp_filenames, + http_user_agent = user_agent, + http_referrer = referrer, + http_content_type = orig_mime_types, + sensor_name = coalesce(system_name, "unknown"), + http_version = version, + http_username = username + | extend + http_user_agent_length = strlen(http_user_agent), + bytes = bytes_in + bytes_out, + is_broadcast =iff(src in("0.0.0.0", "255.255.255.255") or dest in("255.255.255.255", "0.0.0.0"),"true","false"), + is_dest_internal_ip = iff(local_resp == true, "true", "false"), + is_src_internal_ip = iff(local_orig == true, "true", "false"), + host_header=dest_host, + referrer_domain_domain=parse_url(referrer).Host, + referrer_domain_ip=strcat(parse_url(referrer).Host, ":", parse_url(referrer).Port), + direction=case(local_orig=="true" and local_resp=="true", "internal", local_orig=="true" and local_resp=="false", "outbound", local_orig=="false" and local_resp=="false", "external", local_orig=="false" and local_resp=="true", "inbound", "unknown") + | extend + url = strcat("http://",host_header,uri), + url_domain = host_header + | project + system_name, + path, + id_orig_h, + id_orig_p, + id_resp_h, + id_resp_p, + method, + host, + uid, + uri, + referrer, + status_code, + status_msg, + request_body_len, + response_body_len, + tags, + orig_mime_types, + resp_mime_types, + post_body, + orig_fuids, + orig_filenames, + resp_fuids, + resp_filenames, + version, + user_agent, + username, + pwd, + proxied, + origin, + info_code, + info_msg, + EventVendor, + EventProduct, + EventType, + ts, 
+ dest_host, + src, + src_ip, + src_port, + dest, + dest_ip, + dest_port, + http_method, + bytes_in, + bytes_out, + status, + vendor_action, + uri_path, + object, + http_user_agent, + http_referrer, + http_content_type, + sensor_name, + http_version, + http_username, + http_user_agent_length, + bytes, + is_broadcast, + is_dest_internal_ip, + is_src_internal_ip, + host_header, + referrer_domain_domain, + referrer_domain_ip, + direction, + url, + url_domain, + TimeGenerated + }; + corelight_http_agg \ No newline at end of file diff --git a/Solutions/Corelight/Parsers/corelight_ssl_agg.yaml b/Solutions/Corelight/Parsers/corelight_ssl_agg.yaml new file mode 100644 index 00000000000..965e5c03319 --- /dev/null +++ b/Solutions/Corelight/Parsers/corelight_ssl_agg.yaml @@ -0,0 +1,115 @@ +id: 86f7ddaf-abab-4e40-9bef-c443a562ea07 +Function: + Title: Corelight SSL Aggregated Events + Version: '1.1.0' + LastUpdated: '2025-11-26' +Category: Microsoft Sentinel Parser +FunctionName: corelight_ssl_agg +FunctionAlias: corelight_ssl_agg +FunctionQuery: | + let dummy_table = datatable(TimeGenerated: datetime, uid_s: string) []; + let corelight_ssl_agg = view () { + union isfuzzy=true + Corelight_v2_ssl_agg_CL, + dummy_table + | summarize arg_max(TimeGenerated, *) by uid_s + | join kind=leftouter + ( corelight_conn + | project uid, local_orig, local_resp + ) on $left.uid_s == $right.uid + | project-away uid + | extend + path=column_ifexists("_path_s", ""), + system_name=column_ifexists("_system_name_s", ""), + uid = column_ifexists("uid_s", ""), + id_orig_h=column_ifexists("id_orig_h_s", ""), + id_orig_p=column_ifexists("id_orig_p_d", real(null)), + id_resp_h=column_ifexists("id_resp_h_s", ""), + id_resp_p=column_ifexists("id_resp_p_d", real(null)), + version=column_ifexists("version_s", ""), + cipher=column_ifexists("cipher_s", ""), + curve=column_ifexists("curve_s", ""), + established=column_ifexists("established_b", ""), + server_name=column_ifexists("server_name_s", ""), + 
next_protocol=column_ifexists("next_protocol_s", ""), + ssl_history=column_ifexists("ssl_history_s", ""), + cert_chain_fps=column_ifexists("cert_chain_fps_s", ""), + client_cert_chain_fps=column_ifexists("client_cert_chain_fps_s", ""), + validation_status=column_ifexists("validation_status_s", ""), + ja3=column_ifexists("ja3_s", ""), + ja3s=column_ifexists("ja3s_s", ""), + resumed=column_ifexists("resumed_b", ""), + sni_matches_cert=column_ifexists("sni_matches_cert_b", "") + | extend + EventVendor="Corelight", + EventProduct="CorelightSensor", + EventType="ssl_agg", + ts = TimeGenerated, + src=id_orig_h, + src_ip=id_orig_h, + src_port=id_orig_p, + dest=id_resp_h, + dest_ip=id_resp_h, + dest_port=id_resp_p, + ssl_cipher=cipher, + ssl_curve=curve, + ssl_subject_common_name=server_name, + fingerprint=cert_chain_fps, + is_self_signed = iff(validation_status=="self signed certificate", "yes", "no"), + action = iff(established=="true","success","failure"), + sensor_name = coalesce(system_name, "unknown"), + signature=validation_status, + ssl_version = version + | extend + is_broadcast = iff(src in("0.0.0.0", "255.255.255.255") or dest in("255.255.255.255", "0.0.0.0"),"true","false"), + is_src_internal_ip = iff(local_orig == true, "true", "false"), + is_dest_internal_ip = iff(local_resp == true, "true", "false"), + direction=case(local_orig=="true" and local_resp=="true", "internal", local_orig=="true" and local_resp=="false", "outbound", local_orig=="false" and local_resp=="false", "external", local_orig=="false" and local_resp=="true", "inbound", "unknown") + | project + path, + system_name, + uid, + id_orig_h, + id_orig_p, + id_resp_h, + id_resp_p, + version, + cipher, + curve, + established, + server_name, + next_protocol, + ssl_history, + cert_chain_fps, + client_cert_chain_fps, + validation_status, + ja3, + ja3s, + resumed, + sni_matches_cert, + EventVendor, + EventProduct, + EventType, + ts, + src, + src_ip, + src_port, + dest, + dest_ip, + dest_port, + 
ssl_cipher, + ssl_curve, + ssl_subject_common_name, + fingerprint, + is_self_signed, + action, + sensor_name, + signature, + ssl_version, + is_broadcast, + is_src_internal_ip, + is_dest_internal_ip, + direction, + TimeGenerated + }; + corelight_ssl_agg \ No newline at end of file diff --git a/Solutions/Corelight/Parsers/corelight_weird_agg.yaml b/Solutions/Corelight/Parsers/corelight_weird_agg.yaml new file mode 100644 index 00000000000..f8d61b1a75b --- /dev/null +++ b/Solutions/Corelight/Parsers/corelight_weird_agg.yaml @@ -0,0 +1,53 @@ +id: a3a13f8a-2b91-4be6-88ce-00a84c5e2105 +Function: + Title: Corelight WEIRD Aggregated Events + Version: '1.1.0' + LastUpdated: '2025-11-26' +Category: Microsoft Sentinel Parser +FunctionName: corelight_weird_agg +FunctionAlias: corelight_weird_agg +FunctionQuery: | + let dummy_table = datatable(TimeGenerated: datetime, uid_s: string) []; + let corelight_weird_agg = view () { + union isfuzzy=true + Corelight_v2_weird_agg_CL, + dummy_table + | summarize arg_max(TimeGenerated, *) by uid_s + | extend + path=column_ifexists("_path_s", ""), + system_name=column_ifexists("_system_name_s", ""), + id_orig_h=column_ifexists("id_orig_h_s", ""), + id_orig_p=column_ifexists("id_orig_p_d", real(null)), + id_resp_h=column_ifexists("id_resp_h_s", ""), + id_resp_p=column_ifexists("id_resp_p_d", real(null)), + name=column_ifexists("name_s", ""), + uid=column_ifexists("uid_s", ""), + addl=column_ifexists("addl_s", ""), + notice=column_ifexists("notice_b", ""), + peer=column_ifexists("peer_s", ""), + source=column_ifexists("source_s", "") + | extend + EventVendor="Corelight", + EventProduct="CorelightSensor", + EventType="weird_agg", + ts = TimeGenerated + | project + path, + system_name, + id_orig_h, + id_orig_p, + id_resp_h, + id_resp_p, + name, + uid, + addl, + notice, + peer, + source, + EventVendor, + EventProduct, + EventType, + ts, + TimeGenerated + }; + corelight_weird_agg \ No newline at end of file diff --git 
a/Solutions/Corelight/ReleaseNotes.md b/Solutions/Corelight/ReleaseNotes.md index abd16261b87..7dec882e771 100644 --- a/Solutions/Corelight/ReleaseNotes.md +++ b/Solutions/Corelight/ReleaseNotes.md @@ -1,5 +1,6 @@ | **Version** | **Date Modified (DD-MM-YYYY)** | **Change History** | |-------------|--------------------------------|--------------------------------------------------------------------| +| 3.2.2 | 01-12-2025 | Added Corelight Aggregation Parsers. | 3.2.1 | 30-10-2025 | Added corelight_first_seen and corelight_anomaly Parsers. | 3.2.0 | 05-03-2025 | Added new Parsers, Workbooks and Watchlists. | 3.1.0 | 27-09-2024 | Updated Parsers and added new tabs in Workbook. diff --git a/Solutions/CrowdStrike Falcon Endpoint Protection/Data Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json b/Solutions/CrowdStrike Falcon Endpoint Protection/Data Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json index fdef1404311..9e850dc71f9 100644 --- a/Solutions/CrowdStrike Falcon Endpoint Protection/Data Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json +++ b/Solutions/CrowdStrike Falcon Endpoint Protection/Data Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json @@ -130,7 +130,10 @@ "label": "Base API URL", "placeholder": "https://api.us-2.crowdstrike.com", "type": "text", - "name": "apiUrl" + "name": "apiUrl", + "validations": { + "required": true + } } }, { @@ -139,7 +142,10 @@ "label": "Client ID", "placeholder": "Your Client ID", "type": "text", - "name": "clientId" + "name": "clientId", + "validations": { + "required": true + } } }, { @@ -148,7 +154,10 @@ "label": "Client Secret", "placeholder": "Your Client Secret", "type": "password", - "name": "clientSecret" + "name": "clientSecret", + "validations": { + "required": true + } } }, { diff --git a/Solutions/CrowdStrike Falcon Endpoint Protection/Data Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_PollingConfig.json b/Solutions/CrowdStrike Falcon Endpoint Protection/Data 
Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_PollingConfig.json index 00cb671d3e3..06a175ea4a2 100644 --- a/Solutions/CrowdStrike Falcon Endpoint Protection/Data Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_PollingConfig.json +++ b/Solutions/CrowdStrike Falcon Endpoint Protection/Data Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_PollingConfig.json @@ -21,6 +21,31 @@ "apiEndpoint": "[[concat(parameters('apiUrl'),'/spotlight/combined/vulnerabilities/v1')]", "httpMethod": "GET", "rateLimitQPS": 1, + "rateLimitConfig": { + "evaluation": { + "checkMode": "OnlyWhen429" + }, + "extraction": { + "source": "CustomHeaders", + "headers": { + "limit": { + "name": "X-RateLimit-Limit", + "format": "Integer" + }, + "remaining": { + "name": "X-RateLimit-Remaining", + "format": "Integer" + }, + "reset": { + "name": "X-RateLimit-RetryAfter", + "format": "UnixTimeSeconds" + } + } + }, + "retryStrategy": { + "useResetOrRetryAfterHeaders": true + } + }, "queryWindowInMin": 15, "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ", "retryCount": 5, @@ -45,7 +70,7 @@ "pagingType": "NextPageToken", "nextPageTokenJsonPath": "$.meta.pagination.after", "NextPageParaName": "after", - "pageSize": 50, + "pageSize": 500, "pageSizeParameterName": "limit" }, "connectorDefinitionName": "CrowdStrikeAPICCPDefinition", @@ -79,6 +104,31 @@ "apiEndpoint": "[[concat(parameters('apiUrl'),'/alerts/combined/alerts/v1')]", "httpMethod": "POST", "rateLimitQPS": 1, + "rateLimitConfig": { + "evaluation": { + "checkMode": "OnlyWhen429" + }, + "extraction": { + "source": "CustomHeaders", + "headers": { + "limit": { + "name": "X-RateLimit-Limit", + "format": "Integer" + }, + "remaining": { + "name": "X-RateLimit-Remaining", + "format": "Integer" + }, + "reset": { + "name": "X-RateLimit-RetryAfter", + "format": "UnixTimeSeconds" + } + } + }, + "retryStrategy": { + "useResetOrRetryAfterHeaders": true + } + }, "queryWindowInMin": 15, "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ", "retryCount": 5, @@ -101,7 +151,7 @@ "pagingType": 
"PersistentToken", "nextPageTokenJsonPath": "$.meta.pagination.after", "nextPageParaName": "after", - "pageSize": 50 + "pageSize": 500 }, "connectorDefinitionName": "CrowdStrikeAPICCPDefinition", "dataType": "CrowdStrikeAlerts", @@ -134,6 +184,31 @@ "apiEndpoint": "[[concat(parameters('apiUrl'),'/incidents/queries/incidents/v1')]", "httpMethod": "GET", "rateLimitQPS": 1, + "rateLimitConfig": { + "evaluation": { + "checkMode": "OnlyWhen429" + }, + "extraction": { + "source": "CustomHeaders", + "headers": { + "limit": { + "name": "X-RateLimit-Limit", + "format": "Integer" + }, + "remaining": { + "name": "X-RateLimit-Remaining", + "format": "Integer" + }, + "reset": { + "name": "X-RateLimit-RetryAfter", + "format": "UnixTimeSeconds" + } + } + }, + "retryStrategy": { + "useResetOrRetryAfterHeaders": true + } + }, "queryWindowInMin": 15, "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ", "retryCount": 5, @@ -157,7 +232,7 @@ "paging": { "pagingType": "Offset", "offsetParaName": "offset", - "pageSize": 50, + "pageSize": 500, "pageSizeParameterName": "limit" }, "stepInfo": { @@ -227,6 +302,31 @@ "apiEndpoint": "[[concat(parameters('apiUrl'),'/alerts/combined/alerts/v1')]", "httpMethod": "POST", "rateLimitQPS": 1, + "rateLimitConfig": { + "evaluation": { + "checkMode": "OnlyWhen429" + }, + "extraction": { + "source": "CustomHeaders", + "headers": { + "limit": { + "name": "X-RateLimit-Limit", + "format": "Integer" + }, + "remaining": { + "name": "X-RateLimit-Remaining", + "format": "Integer" + }, + "reset": { + "name": "X-RateLimit-RetryAfter", + "format": "UnixTimeSeconds" + } + } + }, + "retryStrategy": { + "useResetOrRetryAfterHeaders": true + } + }, "queryWindowInMin": 15, "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ", "retryCount": 5, @@ -249,7 +349,7 @@ "pagingType": "PersistentToken", "nextPageTokenJsonPath": "$.meta.pagination.after", "nextPageParaName": "after", - "pageSize": 50 + "pageSize": 500 }, "connectorDefinitionName": "CrowdStrikeAPICCPDefinition", "dataType": 
"CrowdStrikeDetections", @@ -282,6 +382,31 @@ "apiEndpoint": "[[concat(parameters('apiUrl'),'/devices/combined/devices/v1')]", "httpMethod": "GET", "rateLimitQPS": 1, + "rateLimitConfig": { + "evaluation": { + "checkMode": "OnlyWhen429" + }, + "extraction": { + "source": "CustomHeaders", + "headers": { + "limit": { + "name": "X-RateLimit-Limit", + "format": "Integer" + }, + "remaining": { + "name": "X-RateLimit-Remaining", + "format": "Integer" + }, + "reset": { + "name": "X-RateLimit-RetryAfter", + "format": "UnixTimeSeconds" + } + } + }, + "retryStrategy": { + "useResetOrRetryAfterHeaders": true + } + }, "queryWindowInMin": 15, "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ", "retryCount": 5, @@ -306,7 +431,7 @@ "pagingType": "NextPageToken", "nextPageTokenJsonPath": "$.meta.pagination.next", "NextPageParaName": "offset", - "pageSize": 50, + "pageSize": 500, "pageSizeParameterName": "limit" }, "connectorDefinitionName": "CrowdStrikeAPICCPDefinition", diff --git a/Solutions/CrowdStrike Falcon Endpoint Protection/Data/Solution_CrowdStrike.json b/Solutions/CrowdStrike Falcon Endpoint Protection/Data/Solution_CrowdStrike.json index f3c734d5122..d94f9b14ca8 100644 --- a/Solutions/CrowdStrike Falcon Endpoint Protection/Data/Solution_CrowdStrike.json +++ b/Solutions/CrowdStrike Falcon Endpoint Protection/Data/Solution_CrowdStrike.json @@ -30,7 +30,7 @@ "azuresentinel.azure-sentinel-solution-commoneventformat" ], "BasePath": "C:\\GitHub\\Azure-Sentinel\\Solutions\\CrowdStrike Falcon Endpoint Protection", - "Version": "3.1.7", + "Version": "3.1.8", "Metadata": "SolutionMetadata.json", "TemplateSpec": true, "Is1Pconnector": false diff --git a/Solutions/CrowdStrike Falcon Endpoint Protection/Package/3.1.8.zip b/Solutions/CrowdStrike Falcon Endpoint Protection/Package/3.1.8.zip new file mode 100644 index 00000000000..261f0c74177 Binary files /dev/null and b/Solutions/CrowdStrike Falcon Endpoint Protection/Package/3.1.8.zip differ diff --git a/Solutions/CrowdStrike Falcon Endpoint 
Protection/Package/mainTemplate.json b/Solutions/CrowdStrike Falcon Endpoint Protection/Package/mainTemplate.json index 58e6acab4e3..c7c431bb857 100644 --- a/Solutions/CrowdStrike Falcon Endpoint Protection/Package/mainTemplate.json +++ b/Solutions/CrowdStrike Falcon Endpoint Protection/Package/mainTemplate.json @@ -55,7 +55,7 @@ "email": "support@microsoft.com", "_email": "[variables('email')]", "_solutionName": "CrowdStrike Falcon Endpoint Protection", - "_solutionVersion": "3.1.7", + "_solutionVersion": "3.1.8", "solutionId": "azuresentinel.azure-sentinel-solution-crowdstrikefalconep", "_solutionId": "[variables('solutionId')]", "uiConfigId1": "CrowdstrikeReplicatorv2", @@ -168,7 +168,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CrowdStrike Falcon Endpoint Protection data connector with template version 3.1.7", + "description": "CrowdStrike Falcon Endpoint Protection data connector with template version 3.1.8", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('dataConnectorVersion1')]", @@ -552,7 +552,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CrowdStrike Falcon Endpoint Protection data connector with template version 3.1.7", + "description": "CrowdStrike Falcon Endpoint Protection data connector with template version 3.1.8", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('dataConnectorVersion2')]", @@ -8707,7 +8707,10 @@ "label": "Base API URL", "placeholder": "https://api.us-2.crowdstrike.com", "type": "text", - "name": "apiUrl" + 
"name": "apiUrl", + "validations": { + "required": true + } } }, { @@ -8716,7 +8719,10 @@ "label": "Client ID", "placeholder": "Your Client ID", "type": "text", - "name": "clientId" + "name": "clientId", + "validations": { + "required": true + } } }, { @@ -8725,7 +8731,10 @@ "label": "Client Secret", "placeholder": "Your Client Secret", "type": "password", - "name": "clientSecret" + "name": "clientSecret", + "validations": { + "required": true + } } }, { @@ -8981,7 +8990,10 @@ "label": "Base API URL", "placeholder": "https://api.us-2.crowdstrike.com", "type": "text", - "name": "apiUrl" + "name": "apiUrl", + "validations": { + "required": true + } } }, { @@ -8990,7 +9002,10 @@ "label": "Client ID", "placeholder": "Your Client ID", "type": "text", - "name": "clientId" + "name": "clientId", + "validations": { + "required": true + } } }, { @@ -8999,7 +9014,10 @@ "label": "Client Secret", "placeholder": "Your Client Secret", "type": "password", - "name": "clientSecret" + "name": "clientSecret", + "validations": { + "required": true + } } }, { @@ -9160,6 +9178,31 @@ "apiEndpoint": "[[concat(parameters('apiUrl'),'/spotlight/combined/vulnerabilities/v1')]", "httpMethod": "GET", "rateLimitQPS": 1, + "rateLimitConfig": { + "evaluation": { + "checkMode": "OnlyWhen429" + }, + "extraction": { + "source": "CustomHeaders", + "headers": { + "limit": { + "name": "X-RateLimit-Limit", + "format": "Integer" + }, + "remaining": { + "name": "X-RateLimit-Remaining", + "format": "Integer" + }, + "reset": { + "name": "X-RateLimit-RetryAfter", + "format": "UnixTimeSeconds" + } + } + }, + "retryStrategy": { + "useResetOrRetryAfterHeaders": true + } + }, "queryWindowInMin": 15, "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ", "retryCount": 5, @@ -9184,7 +9227,7 @@ "pagingType": "NextPageToken", "nextPageTokenJsonPath": "$.meta.pagination.after", "NextPageParaName": "after", - "pageSize": 50, + "pageSize": 500, "pageSizeParameterName": "limit" }, "connectorDefinitionName": 
"CrowdStrikeAPICCPDefinition", @@ -9218,6 +9261,31 @@ "apiEndpoint": "[[concat(parameters('apiUrl'),'/alerts/combined/alerts/v1')]", "httpMethod": "POST", "rateLimitQPS": 1, + "rateLimitConfig": { + "evaluation": { + "checkMode": "OnlyWhen429" + }, + "extraction": { + "source": "CustomHeaders", + "headers": { + "limit": { + "name": "X-RateLimit-Limit", + "format": "Integer" + }, + "remaining": { + "name": "X-RateLimit-Remaining", + "format": "Integer" + }, + "reset": { + "name": "X-RateLimit-RetryAfter", + "format": "UnixTimeSeconds" + } + } + }, + "retryStrategy": { + "useResetOrRetryAfterHeaders": true + } + }, "queryWindowInMin": 15, "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ", "retryCount": 5, @@ -9240,7 +9308,7 @@ "pagingType": "PersistentToken", "nextPageTokenJsonPath": "$.meta.pagination.after", "nextPageParaName": "after", - "pageSize": 50 + "pageSize": 500 }, "connectorDefinitionName": "CrowdStrikeAPICCPDefinition", "dataType": "CrowdStrikeAlerts", @@ -9273,6 +9341,31 @@ "apiEndpoint": "[[concat(parameters('apiUrl'),'/incidents/queries/incidents/v1')]", "httpMethod": "GET", "rateLimitQPS": 1, + "rateLimitConfig": { + "evaluation": { + "checkMode": "OnlyWhen429" + }, + "extraction": { + "source": "CustomHeaders", + "headers": { + "limit": { + "name": "X-RateLimit-Limit", + "format": "Integer" + }, + "remaining": { + "name": "X-RateLimit-Remaining", + "format": "Integer" + }, + "reset": { + "name": "X-RateLimit-RetryAfter", + "format": "UnixTimeSeconds" + } + } + }, + "retryStrategy": { + "useResetOrRetryAfterHeaders": true + } + }, "queryWindowInMin": 15, "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ", "retryCount": 5, @@ -9296,7 +9389,7 @@ "paging": { "pagingType": "Offset", "offsetParaName": "offset", - "pageSize": 50, + "pageSize": 500, "pageSizeParameterName": "limit" }, "stepInfo": { @@ -9366,6 +9459,31 @@ "apiEndpoint": "[[concat(parameters('apiUrl'),'/alerts/combined/alerts/v1')]", "httpMethod": "POST", "rateLimitQPS": 1, + "rateLimitConfig": { + 
"evaluation": { + "checkMode": "OnlyWhen429" + }, + "extraction": { + "source": "CustomHeaders", + "headers": { + "limit": { + "name": "X-RateLimit-Limit", + "format": "Integer" + }, + "remaining": { + "name": "X-RateLimit-Remaining", + "format": "Integer" + }, + "reset": { + "name": "X-RateLimit-RetryAfter", + "format": "UnixTimeSeconds" + } + } + }, + "retryStrategy": { + "useResetOrRetryAfterHeaders": true + } + }, "queryWindowInMin": 15, "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ", "retryCount": 5, @@ -9388,7 +9506,7 @@ "pagingType": "PersistentToken", "nextPageTokenJsonPath": "$.meta.pagination.after", "nextPageParaName": "after", - "pageSize": 50 + "pageSize": 500 }, "connectorDefinitionName": "CrowdStrikeAPICCPDefinition", "dataType": "CrowdStrikeDetections", @@ -9421,6 +9539,31 @@ "apiEndpoint": "[[concat(parameters('apiUrl'),'/devices/combined/devices/v1')]", "httpMethod": "GET", "rateLimitQPS": 1, + "rateLimitConfig": { + "evaluation": { + "checkMode": "OnlyWhen429" + }, + "extraction": { + "source": "CustomHeaders", + "headers": { + "limit": { + "name": "X-RateLimit-Limit", + "format": "Integer" + }, + "remaining": { + "name": "X-RateLimit-Remaining", + "format": "Integer" + }, + "reset": { + "name": "X-RateLimit-RetryAfter", + "format": "UnixTimeSeconds" + } + } + }, + "retryStrategy": { + "useResetOrRetryAfterHeaders": true + } + }, "queryWindowInMin": 15, "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ", "retryCount": 5, @@ -9445,7 +9588,7 @@ "pagingType": "NextPageToken", "nextPageTokenJsonPath": "$.meta.pagination.next", "NextPageParaName": "offset", - "pageSize": 50, + "pageSize": 500, "pageSizeParameterName": "limit" }, "connectorDefinitionName": "CrowdStrikeAPICCPDefinition", @@ -9477,7 +9620,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CrowdStrikeFalconEventStream Data Parser with 
template version 3.1.7", + "description": "CrowdStrikeFalconEventStream Data Parser with template version 3.1.8", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject1').parserVersion1]", @@ -9609,7 +9752,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CrowdstrikeReplicator Data Parser with template version 3.1.7", + "description": "CrowdstrikeReplicator Data Parser with template version 3.1.8", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject2').parserVersion2]", @@ -9741,7 +9884,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CrowdStrikeReplicatorV2 Data Parser with template version 3.1.7", + "description": "CrowdStrikeReplicatorV2 Data Parser with template version 3.1.8", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject3').parserVersion3]", @@ -9873,7 +10016,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CrowdStrikeFalconEndpointProtection Workbook with template version 3.1.7", + "description": "CrowdStrikeFalconEndpointProtection Workbook with template version 3.1.8", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('workbookVersion1')]", @@ -9961,7 
+10104,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CriticalOrHighSeverityDetectionsByUser_AnalyticalRules Analytics Rule with template version 3.1.7", + "description": "CriticalOrHighSeverityDetectionsByUser_AnalyticalRules Analytics Rule with template version 3.1.8", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject1').analyticRuleVersion1]", @@ -9989,18 +10132,18 @@ "status": "Available", "requiredDataConnectors": [ { - "connectorId": "CefAma", "dataTypes": [ "CommonSecurityLog" - ] + ], + "connectorId": "CefAma" } ], "entityMappings": [ { "fieldMappings": [ { - "columnName": "AccountCustomEntity", - "identifier": "FullName" + "identifier": "FullName", + "columnName": "AccountCustomEntity" } ], "entityType": "Account" @@ -10008,8 +10151,8 @@ { "fieldMappings": [ { - "columnName": "HostCustomEntity", - "identifier": "FullName" + "identifier": "FullName", + "columnName": "HostCustomEntity" } ], "entityType": "Host" @@ -10017,8 +10160,8 @@ { "fieldMappings": [ { - "columnName": "IPCustomEntity", - "identifier": "Address" + "identifier": "Address", + "columnName": "IPCustomEntity" } ], "entityType": "IP" @@ -10026,12 +10169,12 @@ { "fieldMappings": [ { - "columnName": "FileHashAlgo", - "identifier": "Algorithm" + "identifier": "Algorithm", + "columnName": "FileHashAlgo" }, { - "columnName": "FileHashCustomEntity", - "identifier": "Value" + "identifier": "Value", + "columnName": "FileHashCustomEntity" } ], "entityType": "FileHash" @@ -10090,7 +10233,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": 
"CriticalSeverityDetection_AnalyticalRules Analytics Rule with template version 3.1.7", + "description": "CriticalSeverityDetection_AnalyticalRules Analytics Rule with template version 3.1.8", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject2').analyticRuleVersion2]", @@ -10118,18 +10261,18 @@ "status": "Available", "requiredDataConnectors": [ { - "connectorId": "CefAma", "dataTypes": [ "CommonSecurityLog" - ] + ], + "connectorId": "CefAma" } ], "entityMappings": [ { "fieldMappings": [ { - "columnName": "AccountCustomEntity", - "identifier": "FullName" + "identifier": "FullName", + "columnName": "AccountCustomEntity" } ], "entityType": "Account" @@ -10137,8 +10280,8 @@ { "fieldMappings": [ { - "columnName": "HostCustomEntity", - "identifier": "FullName" + "identifier": "FullName", + "columnName": "HostCustomEntity" } ], "entityType": "Host" @@ -10146,8 +10289,8 @@ { "fieldMappings": [ { - "columnName": "IPCustomEntity", - "identifier": "Address" + "identifier": "Address", + "columnName": "IPCustomEntity" } ], "entityType": "IP" @@ -10155,12 +10298,12 @@ { "fieldMappings": [ { - "columnName": "FileHashAlgo", - "identifier": "Algorithm" + "identifier": "Algorithm", + "columnName": "FileHashAlgo" }, { - "columnName": "FileHashCustomEntity", - "identifier": "Value" + "identifier": "Value", + "columnName": "FileHashCustomEntity" } ], "entityType": "FileHash" @@ -10219,7 +10362,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CrowdStrike_Base Playbook with template version 3.1.7", + "description": "CrowdStrike_Base Playbook with template version 3.1.8", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": 
"[variables('playbookVersion1')]", @@ -10596,7 +10739,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Crowdstrike-EndpointEnrichment Playbook with template version 3.1.7", + "description": "Crowdstrike-EndpointEnrichment Playbook with template version 3.1.8", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion2')]", @@ -12051,7 +12194,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Crowdstrike-ContainHost Playbook with template version 3.1.7", + "description": "Crowdstrike-ContainHost Playbook with template version 3.1.8", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion3')]", @@ -13166,7 +13309,7 @@ "apiVersion": "2023-04-01-preview", "location": "[parameters('workspace-location')]", "properties": { - "version": "3.1.7", + "version": "3.1.8", "kind": "Solution", "contentSchemaVersion": "3.0.0", "displayName": "CrowdStrike Falcon Endpoint Protection", diff --git a/Solutions/CrowdStrike Falcon Endpoint Protection/ReleaseNotes.md b/Solutions/CrowdStrike Falcon Endpoint Protection/ReleaseNotes.md index 830f6288471..6cd0de4cd7e 100644 --- a/Solutions/CrowdStrike Falcon Endpoint Protection/ReleaseNotes.md +++ b/Solutions/CrowdStrike Falcon Endpoint Protection/ReleaseNotes.md @@ -1,5 +1,6 @@ | **Version** | **Date Modified (DD-MM-YYYY)** | **Change History** | |-------------|--------------------------------|--------------------------------------------------------------------------------| +| 3.1.8 | 08-12-2025 | Updated 
*CrowdStrike API Data Connector* to fix rate limit exceptions by introducing retry logic. | | 3.1.7 | 12-11-2025 | Updated *CrowdStrike API Data Connector* to fix rate limit exceptions | | 3.1.6 | 23-10-2025 | Updated *CrowdStrike API Data Connector* to fix deprecated detections API issues | | 3.1.5 | 22-08-2025 | Updated *CrowdStrike API Data Connector* to fix duplicate logs issues | diff --git a/Solutions/CyeraDSPM/Data Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json b/Solutions/CyeraDSPM/Data Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json index 8cc48d0ab58..8a1e068f8f3 100644 --- a/Solutions/CyeraDSPM/Data Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json +++ b/Solutions/CyeraDSPM/Data Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json @@ -36,7 +36,7 @@ "name": "CyeraIdentities_CL" } ], - "descriptionMarkdown": "The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once recieced can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.", + "descriptionMarkdown": "The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.", "graphQueries": [ { "baseQuery": "CyeraClassifications_CL", @@ -372,7 +372,7 @@ ] }, "publisher": "Cyera Inc", - "title": "Cyera DSPM Azure Sentinel Data Connector" + "title": "Cyera DSPM Microsoft Sentinel Data Connector" }, "createdTimeUtc": "2025-10-31T15:13:26.2481931Z", "lastModifiedUtc": "2025-10-31T15:13:26.2481931Z" diff --git a/Solutions/CyeraDSPM/Data Connectors/CyeraDSPM_CCF/CyeraDSPM_DCR.json b/Solutions/CyeraDSPM/Data Connectors/CyeraDSPM_CCF/CyeraDSPM_DCR.json index 64eeab91f34..4cc1e32e61e 100644 --- a/Solutions/CyeraDSPM/Data Connectors/CyeraDSPM_CCF/CyeraDSPM_DCR.json +++ b/Solutions/CyeraDSPM/Data Connectors/CyeraDSPM_CCF/CyeraDSPM_DCR.json @@ -2,9 +2,9 @@ "type": "Microsoft.Insights/dataCollectionRules", "apiVersion": "2025-09-01", "name": "CyeraDSPMDCR", - "location": "{{location}}", + "location": "{{workspace-location}}", "properties": { - "dataCollectionEndpointId": "CyeraDSPMDataCollectionEndpoint", + "dataCollectionEndpointId": "[concat('/subscriptions/',parameters('subscription'),'/resourceGroups/',parameters('resourceGroupName'),'/providers/Microsoft.Insights/dataCollectionEndpoints/',parameters('workspace'))]", "streamDeclarations": { "Custom-CyeraAssets_SRC": { "columns": [ @@ -546,7 +546,7 @@ "destinations": { "logAnalytics": [ { - "workspaceResourceId": "{workspaceName}", + "workspaceResourceId": "{{workspaceResourceId}}", "name": "cyeradspm" } ] diff --git a/Solutions/CyeraDSPM/Data Connectors/CyeraDSPM_Functions/FunctionAppDC.json b/Solutions/CyeraDSPM/Data Connectors/CyeraDSPM_Functions/FunctionAppDC.json index 1783e8c412c..45ff5c0cf7e 100644 --- a/Solutions/CyeraDSPM/Data 
Connectors/CyeraDSPM_Functions/FunctionAppDC.json +++ b/Solutions/CyeraDSPM/Data Connectors/CyeraDSPM_Functions/FunctionAppDC.json @@ -1,9 +1,9 @@ { "id": "CyeraFunctionsConnector", - "title": "Cyera DSPM Azure Functions Sentinel Data Connector", + "title": "Cyera DSPM Azure Functions Microsoft Sentinel Data Connector", "publisher": "Cyera Inc", "logo": "{{icon-url}}", - "descriptionMarkdown": "The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera\u2019s **Data Security Posture Management (DSPM)** telemetry \u2014 *Assets*, *Identities*, *Issues*, and *Classifications* \u2014 into **Microsoft Sentinel**.\\n\\nThis connector uses an **Azure Function App** to call Cyera\u2019s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** \u2014 no agents required.\\n\\n**Tables created/used**\\n\\n| Entity | Table | Purpose |\\n|---|---|---|\\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\\n\\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft\u2019s recommended Direct ingestion path for Sentinel.", + "descriptionMarkdown": "The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera\u2019s **Data Security Posture Management (DSPM)** telemetry \u2014 *Assets*, *Identities*, *Issues*, and *Classifications* \u2014 into **Microsoft Sentinel**.\\n\\nThis connector uses an **Azure Function App** to call Cyera\u2019s REST API on a schedule, fetch the latest DSPM telemetry, and send it to 
Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** \u2014 no agents required.\\n\\n**Tables created/used**\\n\\n| Entity | Table | Purpose |\\n|---|---|---|\\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\\n\\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft\u2019s recommended Direct ingestion path for Microsoft Sentinel.", "additionalRequirementBanner": "This connector deploys KQL transforms in the DCR. No parser function is required.", "graphQueries": [ { @@ -157,7 +157,7 @@ { "parameters": { "fillWith": [ - "{{workspace}}" + "{{workspace-location}}" ], "label": "Workspace Name" }, diff --git a/Solutions/CyeraDSPM/Data/Solution_Cyera.json b/Solutions/CyeraDSPM/Data/Solution_Cyera.json index 41d2f92f6aa..8e9558709f1 100644 --- a/Solutions/CyeraDSPM/Data/Solution_Cyera.json +++ b/Solutions/CyeraDSPM/Data/Solution_Cyera.json @@ -1,7 +1,7 @@ { "Name": "CyeraDSPM", "Author": "Cyera DSPM OCTO Team - support@cyera.io", - "Logo": "", + "Logo": "", "Description": "The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM instance and ingesting Classifications, Assets, Issues, and Identity Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Platform and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once recieced can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.", "Data Connectors": [ "Data Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json", diff --git a/Solutions/CyeraDSPM/Package/3.0.0.zip b/Solutions/CyeraDSPM/Package/3.0.0.zip index b5202633e05..98a179dfd29 100644 Binary files a/Solutions/CyeraDSPM/Package/3.0.0.zip and b/Solutions/CyeraDSPM/Package/3.0.0.zip differ diff --git a/Solutions/CyeraDSPM/Package/createUiDefinition.json b/Solutions/CyeraDSPM/Package/createUiDefinition.json index 0d869398719..49fbec58700 100644 --- a/Solutions/CyeraDSPM/Package/createUiDefinition.json +++ b/Solutions/CyeraDSPM/Package/createUiDefinition.json @@ -6,7 +6,7 @@ "config": { "isWizard": false, "basics": { - "description": "\n\n**Note:** Please refer to the following before installing the solution: \n\n• Review the solution [Release Notes](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/CyeraDSPM/ReleaseNotes.md)\n\n • There may be [known issues](https://aka.ms/sentinelsolutionsknownissues) pertaining to this Solution, please refer to them before installing.\n\nThe [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM instance and ingesting Classifications, Assets, Issues, and Identity Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Platform and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once recieced can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.\n\n**Data Connectors:** 2\n\n[Learn more about Microsoft Sentinel](https://aka.ms/azuresentinel) | [Learn more about Solutions](https://aka.ms/azuresentinelsolutionsdoc)", + "description": "\n\n**Note:** Please refer to the following before installing the solution: \n\n• Review the solution [Release Notes](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/CyeraDSPM/ReleaseNotes.md)\n\n • There may be [known issues](https://aka.ms/sentinelsolutionsknownissues) pertaining to this Solution, please refer to them before installing.\n\nThe [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM instance and ingesting Classifications, Assets, Issues, and Identity Definitions into Microsoft Sentinel. The data connector is built on Microsoft Sentinel's Codeless Connector Platform and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once recieced can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.\n\n**Data Connectors:** 2\n\n[Learn more about Microsoft Sentinel](https://aka.ms/azuresentinel) | [Learn more about Solutions](https://aka.ms/azuresentinelsolutionsdoc)", "subscription": { "resourceProviders": [ "Microsoft.OperationsManagement/solutions", @@ -60,7 +60,7 @@ "name": "dataconnectors1-text", "type": "Microsoft.Common.TextBlock", "options": { - "text": "This Solution installs the data connector for Cyera DSPM Azure Sentinel Data Connector. You can get Cyera DSPM Azure Sentinel Data Connector data in your Microsoft Sentinel workspace. 
After installing the solution, configure and enable this data connector by following guidance in Manage solution view." + "text": "This Solution installs the data connector for Cyera DSPM Microsoft Sentinel Data Connector. You can get Cyera DSPM Microsoft Sentinel Data Connector data in your Microsoft Sentinel workspace. After installing the solution, configure and enable this data connector by following guidance in Manage solution view." } }, { diff --git a/Solutions/CyeraDSPM/Package/mainTemplate.json b/Solutions/CyeraDSPM/Package/mainTemplate.json index aef9de85dec..192675aa743 100644 --- a/Solutions/CyeraDSPM/Package/mainTemplate.json +++ b/Solutions/CyeraDSPM/Package/mainTemplate.json @@ -27,6 +27,20 @@ "metadata": { "description": "Workspace name for Log Analytics where Microsoft Sentinel is setup" } + }, + "resourceGroupName": { + "type": "string", + "defaultValue": "[resourceGroup().name]", + "metadata": { + "description": "resource group name where Microsoft Sentinel is setup" + } + }, + "subscription": { + "type": "string", + "defaultValue": "[last(split(subscription().id, '/'))]", + "metadata": { + "description": "subscription id where Microsoft Sentinel is setup" + } } }, "variables": { @@ -34,7 +48,7 @@ "_email": "[variables('email')]", "_solutionName": "CyeraDSPM", "_solutionVersion": "3.0.0", - "solutionId": "cyeradspm.azure-sentinel-solution-cyeradspm", + "solutionId": "cyera1658314682323.azure-sentinel-solution-cyeradspm", "_solutionId": "[variables('solutionId')]", "workspaceResourceId": "[resourceId('microsoft.OperationalInsights/Workspaces', parameters('workspace'))]", "dataConnectorCCPVersion": "1.0.0", @@ -65,7 +79,7 @@ ], "properties": { "contentId": "[variables('_dataConnectorContentIdConnectorDefinition1')]", - "displayName": "Cyera DSPM Azure Sentinel Data Connector", + "displayName": "Cyera DSPM Microsoft Sentinel Data Connector", "contentKind": "DataConnector", "mainTemplate": { "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", @@ -113,7 +127,7 @@ "name": "CyeraIdentities_CL" } ], - "descriptionMarkdown": "The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once recieced can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.", + "descriptionMarkdown": "The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.", "graphQueries": [ { "baseQuery": "CyeraClassifications_CL", @@ -449,7 +463,7 @@ ] }, "publisher": "Cyera Inc", - "title": "Cyera DSPM Azure Sentinel Data Connector" + "title": "Cyera DSPM Microsoft Sentinel Data Connector" }, "createdTimeUtc": "2025-10-31T15:13:26.2481931Z", "lastModifiedUtc": "2025-10-31T15:13:26.2481931Z" @@ -497,7 +511,7 @@ "location": "[parameters('workspace-location')]", "kind": "[variables('blanks')]", "properties": { - "dataCollectionEndpointId": "CyeraDSPMDataCollectionEndpoint", + "dataCollectionEndpointId": "[concat('/subscriptions/',parameters('subscription'),'/resourceGroups/',parameters('resourceGroupName'),'/providers/Microsoft.Insights/dataCollectionEndpoints/',parameters('workspace'))]", 
"streamDeclarations": { "Custom-CyeraAssets_SRC": { "columns": [ @@ -1791,7 +1805,7 @@ "name": "CyeraIdentities_CL" } ], - "descriptionMarkdown": "The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once recieced can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.", + "descriptionMarkdown": "The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.", "graphQueries": [ { "baseQuery": "CyeraClassifications_CL", @@ -2127,7 +2141,7 @@ ] }, "publisher": "Cyera Inc", - "title": "Cyera DSPM Azure Sentinel Data Connector" + "title": "Cyera DSPM Microsoft Sentinel Data Connector" }, "createdTimeUtc": "2025-10-31T15:13:26.2481931Z", "lastModifiedUtc": "2025-10-31T15:13:26.2481931Z" @@ -2178,7 +2192,7 @@ ], "properties": { "contentId": "[variables('_dataConnectorContentIdConnections1')]", - "displayName": "Cyera DSPM Azure Sentinel Data Connector", + "displayName": "Cyera DSPM Microsoft Sentinel Data Connector", "contentKind": "ResourcesDataConnector", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", @@ -2193,7 +2207,7 @@ "type": 
"securestring" }, "connectorDefinitionName": { - "defaultValue": "Cyera DSPM Azure Sentinel Data Connector", + "defaultValue": "Cyera DSPM Microsoft Sentinel Data Connector", "type": "securestring", "minLength": 1 }, @@ -2517,10 +2531,10 @@ "properties": { "connectorUiConfig": { "id": "[variables('_uiConfigId2')]", - "title": "Cyera DSPM Azure Functions Sentinel Data Connector", + "title": "Cyera DSPM Azure Functions Microsoft Sentinel Data Connector", "publisher": "Cyera Inc", "logo": "{{icon-url}}", - "descriptionMarkdown": "The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\\n\\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\\n\\n**Tables created/used**\\n\\n| Entity | Table | Purpose |\\n|---|---|---|\\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\\n\\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Sentinel.", + "descriptionMarkdown": "The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\\n\\nThis connector uses an **Azure Function App** to 
call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\\n\\n**Tables created/used**\\n\\n| Entity | Table | Purpose |\\n|---|---|---|\\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\\n\\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.", "additionalRequirementBanner": "This connector deploys KQL transforms in the DCR. No parser function is required.", "graphQueries": [ { @@ -2673,7 +2687,7 @@ { "parameters": { "fillWith": [ - "{{workspace}}" + "{{workspace-location}}" ], "label": "Workspace Name" }, @@ -2765,7 +2779,7 @@ "contentSchemaVersion": "3.0.0", "contentId": "[variables('_dataConnectorContentId2')]", "contentKind": "DataConnector", - "displayName": "Cyera DSPM Azure Functions Sentinel Data Connector", + "displayName": "Cyera DSPM Azure Functions Microsoft Sentinel Data Connector", "contentProductId": "[variables('_dataConnectorcontentProductId2')]", "id": "[variables('_dataConnectorcontentProductId2')]", "version": "[variables('dataConnectorVersion2')]" @@ -2809,9 +2823,9 @@ "kind": "GenericUI", "properties": { "connectorUiConfig": { - "title": "Cyera DSPM Azure Functions Sentinel Data Connector", + "title": "Cyera DSPM Azure Functions Microsoft Sentinel Data Connector", "publisher": "Cyera Inc", - "descriptionMarkdown": "The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s 
**Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\\n\\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\\n\\n**Tables created/used**\\n\\n| Entity | Table | Purpose |\\n|---|---|---|\\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\\n\\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Sentinel.", + "descriptionMarkdown": "The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\\n\\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\\n\\n**Tables created/used**\\n\\n| Entity | Table | Purpose |\\n|---|---|---|\\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\\n| Classifications | 
`CyeraClassifications_CL` | Data class & sensitivity definitions |\\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\\n\\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.", "graphQueries": [ { "metricName": "cyera_functions_classifications", @@ -2963,7 +2977,7 @@ { "parameters": { "fillWith": [ - "{{workspace}}" + "{{workspace-location}}" ], "label": "Workspace Name" }, @@ -3036,7 +3050,7 @@ "contentKind": "Solution", "contentProductId": "[variables('_solutioncontentProductId')]", "id": "[variables('_solutioncontentProductId')]", - "icon": "", + "icon": "", "contentId": "[variables('_solutionId')]", "parentId": "[variables('_solutionId')]", "source": { @@ -3076,7 +3090,7 @@ ], "categories": { "domains": [ - "Security - Data Security Posture Management" + "Security - Information Protection" ] } }, diff --git a/Solutions/CyeraDSPM/SolutionMetadata.json b/Solutions/CyeraDSPM/SolutionMetadata.json index b6ee79373bd..0f5eaf496b7 100644 --- a/Solutions/CyeraDSPM/SolutionMetadata.json +++ b/Solutions/CyeraDSPM/SolutionMetadata.json @@ -1,11 +1,11 @@ { - "publisherId": "cyeradspm", + "publisherId": "cyera1658314682323", "offerId": "azure-sentinel-solution-cyeradspm", "firstPublishDate": "2025-10-15", "lastPublishDate": "2025-10-29", "providers": [ "Cyera" ], "categories": { - "domains": ["Security - Data Security Posture Management"], + "domains": ["Security - Information Protection"], "verticals": [] }, "support": { diff --git a/Solutions/GDPR Compliance & Data Security/Workbooks/GDPRComplianceAndDataSecurity.json b/Solutions/GDPR Compliance & Data Security/Workbooks/GDPRComplianceAndDataSecurity.json index e2d91bd122a..8e858bbb82a 100644 --- a/Solutions/GDPR Compliance & Data Security/Workbooks/GDPRComplianceAndDataSecurity.json +++ b/Solutions/GDPR Compliance & Data Security/Workbooks/GDPRComplianceAndDataSecurity.json @@ -514,7 +514,7 @@ 
{ "type": 1, "content": { - "json": "---\r\n\r\n### 1. Security Alerts and Incidents\r\n\r\nFrom the Azure portal, install the **[Microsoft Defender XDR](https://portal.azure.com/#view/Microsoft_Azure_Marketplace/GalleryItemDetailsBladeNopdl/dontDiscardJourney~/true/id/azuresentinel.azure-sentinel-solution-microsoft365defender)** solution via **Content Hub**. \r\nThen, enable the **Microsoft Defender XDR** data connector to stream security alerts and incidents from Defender products into Microsoft Sentinel. \r\nThese records populate the **`SecurityAlert`** and **`SecurityIncident`** tables. \r\n\r\n⚠️ **Important:** \r\nAll workbook metrics in this section use a **watchlist** to filter only alerts and incidents involving servers that host **personal data**. \r\nYou must configure this watchlist in Sentinel and populate it with the names of your personal data hosting servers.\r\n\r\n#### 📂 Sample Watchlist (GDPR_PersonalData_Assets)\r\n\r\n| HostName |\r\n|------------------------|\r\n| server1 |\r\n| server2 |\r\n| server3 |\r\n| server4 |\r\n\r\n1. Save the watchlist as a CSV or TXT file. \r\n2. In Sentinel → **Configuration > Watchlists**, create a new watchlist (e.g., `GDPR_PersonalData_Assets`). \r\n3. Upload the file and confirm `HostName` is recognized as the search key.\r\n\r\nThis allows you to: \r\n- Focus alerts and incidents on GDPR-relevant systems \r\n- Monitor attack tactics and timelines against personal data servers \r\n- Provide auditors with clear evidence of incident detection and response for regulated data \r\n\r\nAll **Security Alerts & Incidents** visuals in this workbook will only display events related to servers listed in this watchlist.\r\n\r\n📘 [Setup guide – Microsoft Defender XDR connector](https://learn.microsoft.com/azure/sentinel/connect-microsoft-365-defender) \r\n📘 [How to create and use watchlists](https://learn.microsoft.com/azure/sentinel/watchlists)\r\n" + "json": "---\r\n\r\n### 1. 
Security Alerts and Incidents\r\n\r\nFrom the Azure portal, install the **[Microsoft Defender XDR](https://portal.azure.com/#view/Microsoft_Azure_Marketplace/GalleryItemDetailsBladeNopdl/dontDiscardJourney~/true/id/azuresentinel.azure-sentinel-solution-microsoft365defender)** solution via **Content Hub**. \r\nThen, enable the **Microsoft Defender XDR** data connector to stream security alerts and incidents from Defender products into Microsoft Sentinel. \r\nThese records populate the **`SecurityAlert`** and **`SecurityIncident`** tables. \r\n\r\n⚠️ **Important:** \r\nAll workbook metrics in this section use a **watchlist** to filter only alerts and incidents involving servers or cloud resources that host **personal data**. \r\nYou must configure this watchlist in Sentinel and populate it with the names of your personal data hosting assets.\r\n\r\n#### 📂 Sample Watchlist (GDPR_PersonalData_Assets)\r\n\r\n| AssetName |\r\n|------------------------|\r\n| server1 |\r\n| server2 |\r\n| server3 |\r\n| server4 |\r\n\r\n1. Save the watchlist as a CSV or TXT file. \r\n2. In Sentinel → **Configuration > Watchlists**, create a new watchlist (e.g., `GDPR_PersonalData_Assets`). \r\n3. 
Upload the file and confirm `AssetName` is recognized as the search key.\r\n\r\nThis allows you to: \r\n- Focus alerts and incidents on GDPR-relevant systems \r\n- Monitor attack tactics and timelines against personal data servers or cloud resources\r\n- Provide auditors with clear evidence of incident detection and response for regulated data \r\n\r\nAll **Security Alerts & Incidents** visuals in this workbook will only display events related to assets listed in this watchlist.\r\n\r\n📘 [Setup guide – Microsoft Defender XDR connector](https://learn.microsoft.com/azure/sentinel/connect-microsoft-365-defender) \r\n📘 [How to create and use watchlists](https://learn.microsoft.com/azure/sentinel/watchlists)\r\n" }, "customWidth": "40", "name": "text - 6", @@ -648,7 +648,7 @@ "type": 3, "content": { "version": "KqlItem/1.0", - "query": "// Load personal data servers from Sentinel watchlist\r\nlet PersonalDataServers = _GetWatchlist('GDPR_PersonalData_Assets')\r\n | project HostName = tolower(HostName);\r\nSecurityAlert\r\n| mv-expand Entity = todynamic(Entities)\r\n| extend EntityType = tostring(Entity.Type)\r\n| extend HostName = iff(EntityType == \"host\",tolower(tostring(Entity.HostName)), \"\")\r\n| where HostName <> \"\"\r\n// Keep only alerts where HostName is in the watchlist\r\n| join kind=inner (PersonalDataServers) on HostName\r\n| extend DeviceName = HostName, AlertId = SystemAlertId\r\n| summarize by AlertId, AlertName, TimeGenerated\r\n| make-series Alerts = count() on TimeGenerated step 1d by AlertName", + "query": "// Load personal data assets from Sentinel watchlist\r\nlet PersonalDataAssets = _GetWatchlist('GDPR_PersonalData_Assets')\r\n | project AssetName = tolower(AssetName);\r\nSecurityAlert\r\n| mv-expand Entities = todynamic(Entities)\r\n| extend EntityType = tostring(Entities.Type)\r\n| extend EntityName = tolower(\r\n case(\r\n EntityType == \"host\", coalesce(tostring(Entities.HostName), tostring(Entities.FQDN)),\r\n EntityType == 
\"azure-resource\", tostring(Entities.ResourceId),\r\n EntityType == \"amazon-resources\", tostring(Entities.AmazonResourceId),\r\n EntityType == \"gcp-resource\", tostring(Entities.FullResourceName),\r\n EntityType == \"blob-container\", tostring(Entities.Name),\r\n EntityType == \"blob\", tostring(Entities.Name) ,\"\"))\r\n| where EntityName <> \"\"\r\n// Keep only alerts where EntityName is in the watchlist\r\n| where EntityName has_any (PersonalDataAssets)\r\n| extend DeviceName = EntityName, AlertId = SystemAlertId\r\n| summarize by AlertId, AlertName, TimeGenerated\r\n| make-series Alerts = count() on TimeGenerated step 1d by AlertName", "size": 0, "title": "Alerts Over Time for Personal Data Hosting Systems", "timeContextFromParameter": "TimeRange", @@ -666,7 +666,7 @@ "type": 3, "content": { "version": "KqlItem/1.0", - "query": "// Load personal data servers from Sentinel watchlist\r\nlet PersonalDataServers = _GetWatchlist('GDPR_PersonalData_Assets')\r\n | project HostName = tolower(HostName);\r\nSecurityAlert\r\n| mv-expand Entity = todynamic(Entities)\r\n| extend EntityType = tostring(Entity.Type)\r\n| extend HostName = iff(EntityType == \"host\",tolower(tostring(Entity.HostName)), \"\")\r\n| where HostName <> \"\"\r\n// Keep only alerts where HostName is in the watchlist\r\n| join kind=inner (PersonalDataServers) on HostName\r\n| summarize \r\n AlertName = any(AlertName),\r\n AlertSeverity = any(AlertSeverity),\r\n DeviceNames = make_set(HostName,10),\r\n TimeGenerated = any(TimeGenerated)\r\n by AlertId = SystemAlertId, AlertLink\r\n | project-reorder AlertName, AlertSeverity, AlertLink, DeviceNames, TimeGenerated, AlertId\r\n| order by TimeGenerated desc\r\n| take 100", + "query": "// Load personal data assets from Sentinel watchlist\r\nlet PersonalDataAssets = _GetWatchlist('GDPR_PersonalData_Assets')\r\n | project AssetName = tolower(AssetName);\r\nSecurityAlert\r\n| mv-expand Entities = todynamic(Entities)\r\n| extend EntityType = 
tostring(Entities.Type)\r\n| extend EntityName = tolower(\r\n case(\r\n EntityType == \"host\", coalesce(tostring(Entities.HostName), tostring(Entities.FQDN)),\r\n EntityType == \"azure-resource\", tostring(Entities.ResourceId),\r\n EntityType == \"amazon-resources\", tostring(Entities.AmazonResourceId),\r\n EntityType == \"gcp-resource\", tostring(Entities.FullResourceName),\r\n EntityType == \"blob-container\", tostring(Entities.Name),\r\n EntityType == \"blob\", tostring(Entities.Name) ,\"\"))\r\n| where EntityName <> \"\"\r\n// Keep only alerts where EntityName is in the watchlist\r\n| where EntityName has_any (PersonalDataAssets)\r\n| summarize \r\n AlertName = any(AlertName),\r\n AlertSeverity = any(AlertSeverity),\r\n DeviceNames = make_set(EntityName,10),\r\n TimeGenerated = any(TimeGenerated)\r\n by AlertId = SystemAlertId, AlertLink\r\n | project-reorder AlertName, AlertSeverity, AlertLink, DeviceNames, TimeGenerated, AlertId\r\n| order by TimeGenerated desc\r\n| take 100", "size": 0, "title": "Alerts Details", "timeContextFromParameter": "TimeRange", @@ -695,7 +695,7 @@ "type": 3, "content": { "version": "KqlItem/1.0", - "query": "// Load personal data servers from Sentinel watchlist\r\nlet PersonalDataServers = _GetWatchlist('GDPR_PersonalData_Assets')\r\n | project HostName = tolower(HostName);\r\nSecurityAlert\r\n| mv-expand Entity = todynamic(Entities)\r\n| extend EntityType = tostring(Entity.Type)\r\n| extend HostName = iff(EntityType == \"host\",tolower(tostring(Entity.HostName)), \"\")\r\n| where HostName <> \"\"\r\n// Keep only alerts where HostName is in the watchlist\r\n| join kind=inner (PersonalDataServers) on HostName\r\n| summarize by Tactics, SystemAlertId\r\n| summarize Count=count() by Tactics\r\n| sort by Count desc", + "query": "// Load personal data assets from Sentinel watchlist\r\nlet PersonalDataAssets = _GetWatchlist('GDPR_PersonalData_Assets')\r\n | project AssetName = tolower(AssetName);\r\nSecurityAlert\r\n| mv-expand Entities 
= todynamic(Entities)\r\n| extend EntityType = tostring(Entities.Type)\r\n| extend EntityName = tolower(\r\n case(\r\n EntityType == \"host\", coalesce(tostring(Entities.HostName), tostring(Entities.FQDN)),\r\n EntityType == \"azure-resource\", tostring(Entities.ResourceId),\r\n EntityType == \"amazon-resources\", tostring(Entities.AmazonResourceId),\r\n EntityType == \"gcp-resource\", tostring(Entities.FullResourceName),\r\n EntityType == \"blob-container\", tostring(Entities.Name),\r\n EntityType == \"blob\", tostring(Entities.Name) ,\"\"))\r\n| where EntityName <> \"\"\r\n// Keep only alerts where EntityName is in the watchlist\r\n| where EntityName has_any (PersonalDataAssets)\r\n| summarize by Tactics, SystemAlertId\r\n| summarize Count=count() by Tactics\r\n| sort by Count desc", "size": 0, "title": "Alerts by MITRE ATT&CK® Tactics", "timeContextFromParameter": "TimeRange", @@ -726,7 +726,7 @@ "type": 3, "content": { "version": "KqlItem/1.0", - "query": "// Load personal data servers from Sentinel watchlist\r\nlet PersonalDataServers = _GetWatchlist('GDPR_PersonalData_Assets')\r\n | project HostName = tolower(HostName);\r\nSecurityIncident\r\n | summarize hint.strategy = shuffle arg_max(LastModifiedTime, *) by IncidentNumber\r\n | mv-expand AlertIds\r\n | extend AlertId = tostring(AlertIds)\r\n | join kind= innerunique ( \r\n SecurityAlert \r\n )\r\n on $left.AlertId == $right.SystemAlertId\r\n | summarize hint.strategy = shuffle arg_max(TimeGenerated, *), NumberOfUpdates = count() by SystemAlertId\r\n | mv-expand todynamic(Entities)\r\n | where Entities[\"Type\"] =~ \"host\"\r\n | extend HostName = tolower(tostring(Entities.HostName))\r\n | where Entities[\"HostName\"] <> \"\"\r\n // Keep only alerts where HostName is in the watchlist\r\n | join kind=inner (PersonalDataServers) on HostName\r\n | extend Href_ = tostring(parse_json(ExtendedLinks)[0].Href)\r\n | summarize DeviceNames = make_set(HostName,10), arg_max(TimeGenerated, *) by IncidentNumber\r\n | 
parse IncidentUrl with * '/#asset/Microsoft_Azure_Security_Insights/Incident' IncidentBlade\r\n | distinct Title, Severity, IncidentBlade, tostring(DeviceNames), TimeGenerated, IncidentNumber\r\n| make-series count() default=0 on TimeGenerated from {TimeRange:start} to {TimeRange:end} step 1d by Title\r\n| render timechart\r\n\r\n\r\n\r\n", + "query": "// Load personal data assets from Sentinel watchlist\r\nlet PersonalDataAssets = _GetWatchlist('GDPR_PersonalData_Assets')\r\n | project AssetName = tolower(AssetName);\r\nSecurityIncident\r\n| summarize hint.strategy = shuffle arg_max(LastModifiedTime, *) by IncidentNumber\r\n| mv-expand AlertIds\r\n| extend AlertId = tostring(AlertIds)\r\n| join kind= innerunique ( \r\n SecurityAlert \r\n )\r\n on $left.AlertId == $right.SystemAlertId\r\n| summarize hint.strategy = shuffle arg_max(TimeGenerated, *), NumberOfUpdates = count() by SystemAlertId\r\n| mv-expand todynamic(Entities)\r\n| extend EntityType = tostring(Entities.Type)\r\n| extend EntityName = tolower(\r\n case(\r\n EntityType == \"host\",\r\n coalesce(tostring(Entities.HostName), tostring(Entities.FQDN)),\r\n EntityType == \"azure-resource\",\r\n tostring(Entities.ResourceId),\r\n EntityType == \"amazon-resources\",\r\n tostring(Entities.AmazonResourceId),\r\n EntityType == \"gcp-resource\",\r\n tostring(Entities.FullResourceName),\r\n EntityType == \"blob-container\",\r\n tostring(Entities.Name),\r\n EntityType == \"blob\",\r\n tostring(Entities.Name),\r\n \"\"\r\n)\r\n )\r\n| where EntityName <> \"\"\r\n// Keep only alerts where EntityName is in the watchlist\r\n| where EntityName has_any (PersonalDataAssets)\r\n| extend Href_ = tostring(parse_json(ExtendedLinks)[0].Href)\r\n| summarize DeviceNames = make_set(EntityName, 10), arg_max(TimeGenerated, *) by IncidentNumber\r\n| parse IncidentUrl with * '/#asset/Microsoft_Azure_Security_Insights/Incident' IncidentBlade\r\n| distinct\r\n Title,\r\n Severity,\r\n IncidentBlade,\r\n tostring(DeviceNames),\r\n 
TimeGenerated,\r\n IncidentNumber\r\n| make-series count() default=0 on TimeGenerated from {TimeRange:start} to {TimeRange:end} step 1d by Title\r\n| render timechart", "size": 0, "showAnalytics": true, "title": "Security Incidents Over Time for Personal Data Hosting Systems", @@ -889,7 +889,7 @@ "type": 3, "content": { "version": "KqlItem/1.0", - "query": "// Load personal data servers from Sentinel watchlist\r\nlet PersonalDataServers = _GetWatchlist('GDPR_PersonalData_Assets')\r\n | project HostName = tolower(HostName);\r\nSecurityIncident\r\n | summarize hint.strategy = shuffle arg_max(LastModifiedTime, *) by IncidentNumber\r\n | mv-expand AlertIds\r\n | extend AlertId = tostring(AlertIds)\r\n | join kind= innerunique ( \r\n SecurityAlert \r\n )\r\n on $left.AlertId == $right.SystemAlertId\r\n | summarize hint.strategy = shuffle arg_max(TimeGenerated, *), NumberOfUpdates = count() by SystemAlertId\r\n | extend EntitiesSet = todynamic(Entities)\r\n | mv-expand todynamic(Entities)\r\n | where Entities[\"Type\"] =~ \"host\"\r\n | extend HostName = tolower(tostring(Entities.HostName))\r\n | where Entities[\"HostName\"] <> \"\"\r\n // Keep only alerts where HostName is in the watchlist\r\n | join kind=inner (PersonalDataServers) on HostName\r\n | extend Href_ = tostring(parse_json(ExtendedLinks)[0].Href)\r\n | summarize DeviceNames = make_set(HostName,10), arg_max(TimeGenerated, *) by IncidentNumber\r\n | parse IncidentUrl with * '/#asset/Microsoft_Azure_Security_Insights/Incident' IncidentBlade\r\n | mv-expand todynamic(EntitiesSet)\r\n | extend Name = tostring(tolower(EntitiesSet[\"Name\"])), UPNSuffix = tostring(EntitiesSet[\"UPNSuffix\"])\r\n | extend UPN = iff(Name != \"\" and UPNSuffix != \"\", strcat(Name, \"@\", UPNSuffix), \"\")\r\n | where UPN <> \"\"\r\n | summarize count() by UPN\r\n | render piechart", + "query": "// Load personal data assets from Sentinel watchlist\r\nlet PersonalDataAssets = _GetWatchlist('GDPR_PersonalData_Assets')\r\n | project 
AssetName = tolower(AssetName);\r\nSecurityIncident\r\n | summarize hint.strategy = shuffle arg_max(LastModifiedTime, *) by IncidentNumber\r\n | mv-expand AlertIds\r\n | extend AlertId = tostring(AlertIds)\r\n | join kind= innerunique ( \r\n SecurityAlert \r\n )\r\n on $left.AlertId == $right.SystemAlertId\r\n | summarize hint.strategy = shuffle arg_max(TimeGenerated, *), NumberOfUpdates = count() by SystemAlertId\r\n | extend EntitiesSet = todynamic(Entities)\r\n | mv-expand todynamic(Entities)\r\n | extend EntityType = tostring(Entities.Type)\r\n | extend EntityName = tolower(\r\n case(\r\n EntityType == \"host\", coalesce(tostring(Entities.HostName), tostring(Entities.FQDN)),\r\n EntityType == \"azure-resource\", tostring(Entities.ResourceId),\r\n EntityType == \"amazon-resources\", tostring(Entities.AmazonResourceId),\r\n EntityType == \"gcp-resource\", tostring(Entities.FullResourceName),\r\n EntityType == \"blob-container\", tostring(Entities.Name),\r\n EntityType == \"blob\", tostring(Entities.Name) ,\"\"))\r\n| where EntityName <> \"\"\r\n// Keep only alerts where EntityName is in the watchlist\r\n| where EntityName has_any (PersonalDataAssets)\r\n | extend Href_ = tostring(parse_json(ExtendedLinks)[0].Href)\r\n | summarize DeviceNames = make_set(EntityName,10), arg_max(TimeGenerated, *) by IncidentNumber\r\n | parse IncidentUrl with * '/#asset/Microsoft_Azure_Security_Insights/Incident' IncidentBlade\r\n | mv-expand todynamic(EntitiesSet)\r\n | extend Name = tostring(tolower(EntitiesSet[\"Name\"])), UPNSuffix = tostring(EntitiesSet[\"UPNSuffix\"])\r\n | extend UPN = iff(Name != \"\" and UPNSuffix != \"\", strcat(Name, \"@\", UPNSuffix), \"\")\r\n | where UPN <> \"\"\r\n | summarize count() by UPN\r\n | render piechart", "size": 0, "showAnalytics": true, "title": "Security Incidents by Users", @@ -1099,7 +1099,7 @@ "type": 3, "content": { "version": "KqlItem/1.0", - "query": "// Load personal data servers from Sentinel watchlist\r\nlet PersonalDataServers = 
_GetWatchlist('GDPR_PersonalData_Assets')\r\n | project HostName = tolower(HostName);\r\nSecurityIncident\r\n | summarize hint.strategy = shuffle arg_max(LastModifiedTime, *) by IncidentNumber\r\n | mv-expand AlertIds\r\n | extend AlertId = tostring(AlertIds)\r\n | join kind= innerunique ( \r\n SecurityAlert \r\n )\r\n on $left.AlertId == $right.SystemAlertId\r\n | summarize hint.strategy = shuffle arg_max(TimeGenerated, *), NumberOfUpdates = count() by SystemAlertId\r\n | mv-expand todynamic(Entities)\r\n | where Entities[\"Type\"] =~ \"host\"\r\n | extend HostName = tolower(tostring(Entities.HostName))\r\n | where Entities[\"HostName\"] <> \"\"\r\n // Keep only alerts where HostName is in the watchlist\r\n | join kind=inner (PersonalDataServers) on HostName\r\n | extend Href_ = tostring(parse_json(ExtendedLinks)[0].Href)\r\n | summarize DeviceNames = make_set(HostName,10), arg_max(TimeGenerated, *) by IncidentNumber\r\n | parse IncidentUrl with * '/#asset/Microsoft_Azure_Security_Insights/Incident' IncidentBlade\r\n | distinct Title, Severity, IncidentBlade, tostring(DeviceNames), TimeGenerated, IncidentNumber \r\n | sort by TimeGenerated desc\r\n | limit 100", + "query": "// Load personal data assets from Sentinel watchlist\r\nlet PersonalDataAssets = _GetWatchlist('GDPR_PersonalData_Assets')\r\n | project AssetName = tolower(AssetName);\r\nSecurityIncident\r\n | summarize hint.strategy = shuffle arg_max(LastModifiedTime, *) by IncidentNumber\r\n | mv-expand AlertIds\r\n | extend AlertId = tostring(AlertIds)\r\n | join kind= innerunique ( \r\n SecurityAlert \r\n )\r\n on $left.AlertId == $right.SystemAlertId\r\n | summarize hint.strategy = shuffle arg_max(TimeGenerated, *), NumberOfUpdates = count() by SystemAlertId\r\n | mv-expand todynamic(Entities)\r\n | extend EntityType = tostring(Entities.Type)\r\n | extend EntityName = tolower(\r\n case(\r\n EntityType == \"host\", coalesce(tostring(Entities.HostName), tostring(Entities.FQDN)),\r\n EntityType == 
\"azure-resource\", tostring(Entities.ResourceId),\r\n EntityType == \"amazon-resources\", tostring(Entities.AmazonResourceId),\r\n EntityType == \"gcp-resource\", tostring(Entities.FullResourceName),\r\n EntityType == \"blob-container\", tostring(Entities.Name),\r\n EntityType == \"blob\", tostring(Entities.Name) ,\"\"))\r\n | where EntityName <> \"\"\r\n // Keep only alerts where EntityName is in the watchlist\r\n | where EntityName has_any (PersonalDataAssets)\r\n | extend Href_ = tostring(parse_json(ExtendedLinks)[0].Href)\r\n | summarize DeviceNames = make_set(EntityName,10), arg_max(TimeGenerated, *) by IncidentNumber\r\n | parse IncidentUrl with * '/#asset/Microsoft_Azure_Security_Insights/Incident' IncidentBlade\r\n | distinct Title, Severity, IncidentBlade, tostring(DeviceNames), TimeGenerated, IncidentNumber \r\n | sort by TimeGenerated desc\r\n | limit 100", "size": 0, "showAnalytics": true, "title": "Security Incidents Details", diff --git a/Solutions/GravityZone/Analytic Rules/Incidents.yaml b/Solutions/GravityZone/Analytic Rules/Incidents.yaml new file mode 100644 index 00000000000..e761838871b --- /dev/null +++ b/Solutions/GravityZone/Analytic Rules/Incidents.yaml @@ -0,0 +1,48 @@ +id: 73c803aa-1188-45dd-8379-62a3319d3d9f +name: NRT GravityZone Incident Alerts +description: The query identifies incident-level events received from the GravityZone Data Connector +version: 1.0.0 +kind: NRT +severity: Medium +status: Available +requiredDataConnectors: + - connectorId: GravityZoneDataConnector + dataTypes: + - ASimAlertEventBitdefenderGravityZone +tactics: +relevantTechniques: +query: | + ASimAlertEventBitdefenderGravityZone + | extend IncidentType = case( + AdditionalFields.Module == "new-incident", "EDR Incident", + AdditionalFields.Module == "new-extended-incident", "XDR Incident", + AdditionalFields.Module == "ransomware-mitigation", "Ransomware Mitigation", + AdditionalFields.Module == "network-sandboxing", "Sandbox Analyzer Detection", + 
AdditionalFields.Module == "exchange-malware", "Exchange Malware Detection", + "Incident" // fallback value if null or unmatched + ), + Tactics = AdditionalFields.AttackTypes + | project EventUid, EventSeverity, EventStartTime, IncidentType, Tactics, EventVendor, EventProduct, DvcId, DvcIpAddr, DvcHostname, DvcAction, DvcFQDN +eventGroupingSettings: + aggregationKind: AlertPerResult +entityMappings: + - entityType: Host + fieldMappings: + - identifier: HostName + columnName: DvcHostname + - entityType: IP + fieldMappings: + - identifier: Address + columnName: DvcIpAddr +alertDetailsOverride: + alertSeverityColumnName: EventSeverity + alertDisplayNameFormat: 'GravityZone: {{IncidentType}}' + alertDescriptionFormat: | + Alert generated on {{EventStartTime}} in Bitdefender GravityZone.\n\nGravityZone Incident ID / Alert GUID: {{EventUid}}\n\nPlease check the source for more information and investigate further. + alertTacticsColumnName: Tactics + alertDynamicProperties: + - alertProperty: ProductName + value: EventProduct + - alertProperty: ProviderName + value: EventVendor + diff --git a/Solutions/GravityZone/Data Connectors/GravityZone_API.json b/Solutions/GravityZone/Data Connectors/GravityZone_API.json new file mode 100644 index 00000000000..cd2a8b64c79 --- /dev/null +++ b/Solutions/GravityZone/Data Connectors/GravityZone_API.json @@ -0,0 +1,93 @@ +{ + "id": "GravityZoneDataConnector", + "title": "GravityZone Data Connector", + "publisher": "Bitdefender", + "descriptionMarkdown": "This connector enables integration between **Bitdefender GravityZone** and **Microsoft Sentinel** through the **Event Push Service API**. 
Once configured, it streams all GravityZone event types directly into your Sentinel workspace, where they are stored as logs in the `GzSecurityEvents_CL` table.\n\nKey event categories such as **EDR, XDR, ransomware mitigation, network sandboxing, and Exchange malware events** can be automatically correlated and generate incidents through the **NRT GravityZone Incident Alerts** analytics rule.", + "graphQueries": [ + { + "metricName": "Total events received", + "legend": "Events", + "baseQuery": "GzSecurityEvents_CL" + } + ], + "sampleQueries": [ + { + "description": "Get Sample Events", + "query": "GzSecurityEvents_CL\n | take 10" + } + ], + "dataTypes": [ + { + "name": "GzSecurityEvents_CL", + "lastDataReceivedQuery": "GzSecurityEvents_CL\n | summarize Time = max(TimeGenerated)" + } + ], + "connectivityCriterias": [ + { + "type": "IsConnectedQuery", + "value": [ + "GzSecurityEvents_CL\n | summarize LastLogReceived = max(TimeGenerated)\n | project IsConnected = LastLogReceived > ago(7d)" + ] + } + ], + "availability": { + "status": 1, + "isPreview": false + }, + "permissions": { + "resourceProvider": [ + { + "provider": "Microsoft.OperationalInsights/workspaces", + "permissionsDisplayText": "read and write permissions on the workspace are required.", + "providerDisplayName": "Workspace", + "scope": "Workspace", + "requiredPermissions": { + "write": true, + "read": true, + "delete": true + } + }, + { + "provider": "Microsoft.Insights/DataCollectionRules", + "permissionsDisplayText": "read and write permissions to create data collection rules are required.", + "providerDisplayName": "Data Collection Rule", + "scope": "ResourceGroup", + "requiredPermissions": { + "Write": true, + "Read": true, + "Delete": false + } + }, + { + "provider": "Microsoft.Insights/DataCollectionEndpoints", + "permissionsDisplayText": "read and write permissions to create data collection endpoints are required.", + "providerDisplayName": "Data Collection Endpoint", + "scope": 
"ResourceGroup", + "requiredPermissions": { + "Write": true, + "Read": true, + "Delete": false + } + } + ], + "customs": [ + { + "name": "Azure App Registration", + "description": "Microsoft Entra App Registration with the following details retained Directory (Tenant) ID, Application (Client) ID, Managed Service Principal Object ID (from the Enterprise Applications entry of the app), Client Secret (generated under Certificates & secrets)." + }, + { + "name": "GravityZone Cloud Account", + "description": "A GravityZone Cloud account with a generated API key for the Event Push Service endpoint." + }, + { + "name": "Read our guide", + "description": "Follow this step-by-step article to set up the integration. [Customers](https://www.bitdefender.com/business/support/en/77209-1455218-integrate-gravityzone-with-azure-sentinel.html) | [Partners](https://www.bitdefender.com/business/support/en/77211-1455218-integrate-gravityzone-with-azure-sentinel.html)" + } + ] + }, + "instructionSteps": [ + { + "description": "1. Click the **Deploy to Azure** button below and fill in the required parameters. \n\n\t\n\n[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-gravityzone-azuredeploy)\n\n2. Collect the **Logs Ingestion** URL from `gz-sentinel-dce` [Data Collection Endpoint](https://portal.azure.com/#view/HubsExtension/BrowseResource.ReactView/resourceType/microsoft.insights%2Fdatacollectionendpoints)\n\n3. Collect the **Immutable ID** from `gz-sentinel-dcr` [Data Collection Rule](https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules)\n\n4. Go to your GravityZone Cloud account and navigate to **My Account**. Create an API key with **Event Push Service** permissions.\n\n5. Configure your **Event Push Service** settings using this article. 
[Customers](https://www.bitdefender.com/business/support/en/77209-1455218-integrate-gravityzone-with-azure-sentinel.html#UUID-5e8bbfa1-7892[%E2%80%A6]-2427-abd6f930e8c2) | [Partners](https://www.bitdefender.com/business/support/en/77211-1455218-integrate-gravityzone-with-azure-sentinel.html#UUID-5e8bbfa1-7892[%E2%80%A6]-2427-abd6f930e8c2).\n\n**Please note that after the successful deployment of the Data Connector & successful setup of GravityZone's Event Push Service, the system will receive Activity Log data in near-real-time. A short delay may occur between data transmission and its appearance in the Microsoft Sentinel Logs section.**" + } + ] +} diff --git a/Solutions/GravityZone/Data Connectors/azuredeploy_GravityZone_API.json b/Solutions/GravityZone/Data Connectors/azuredeploy_GravityZone_API.json new file mode 100644 index 00000000000..3acab6fa236 --- /dev/null +++ b/Solutions/GravityZone/Data Connectors/azuredeploy_GravityZone_API.json @@ -0,0 +1,424 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "_generator": { + "name": "bicep", + "version": "0.39.26.7824", + "templateHash": "16324191430234905054" + } + }, + "parameters": { + "logAnalyticsWorkspaceName": { + "type": "string", + "metadata": { + "description": "The name of the Log Analytics Workspace where you want to deploy the Data Connector. It must already exist and be onboarded into Microsoft Sentinel beforehand." + } + }, + "entraAppObjectId": { + "type": "string", + "metadata": { + "description": "The Object ID of the Enterprise App linked to your Entra App Registration. This is required to set up the necessary role assignments for data ingestion." 
+ } + } + }, + "variables": { + "vendorTag": "Bitdefender", + "applicationTag": "GravityZone Log Ingestion", + "dataCollectionEndpointName": "gz-sentinel-dce", + "dataCollectionRuleName": "gz-sentinel-dcr", + "eventsTableName": "GzSecurityEvents_CL" + }, + "resources": [ + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2025-04-01", + "name": "table-deployment", + "properties": { + "expressionEvaluationOptions": { + "scope": "inner" + }, + "mode": "Incremental", + "parameters": { + "logAnalyticsWorkspaceName": { + "value": "[parameters('logAnalyticsWorkspaceName')]" + }, + "eventsTableName": { + "value": "[variables('eventsTableName')]" + } + }, + "template": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "_generator": { + "name": "bicep", + "version": "0.39.26.7824", + "templateHash": "18072963239405517330" + } + }, + "parameters": { + "eventsTableName": { + "type": "string" + }, + "logAnalyticsWorkspaceName": { + "type": "string" + } + }, + "resources": [ + { + "type": "Microsoft.OperationalInsights/workspaces/tables", + "apiVersion": "2023-09-01", + "name": "[format('{0}/{1}', parameters('logAnalyticsWorkspaceName'), parameters('eventsTableName'))]", + "properties": { + "schema": { + "name": "[parameters('eventsTableName')]", + "columns": [ + { + "name": "TimeGenerated", + "type": "datetime" + }, + { + "name": "start_time", + "type": "datetime" + }, + { + "name": "end_time", + "type": "datetime" + }, + { + "name": "company_id", + "type": "string" + }, + { + "name": "module", + "type": "string" + }, + { + "name": "data", + "type": "dynamic" + } + ] + } + } + } + ] + } + } + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2025-04-01", + "name": "dce-deployment", + "properties": { + "expressionEvaluationOptions": { + "scope": "inner" + }, + "mode": "Incremental", + "parameters": { + "vendorTag": { + "value": 
"[variables('vendorTag')]" + }, + "applicationTag": { + "value": "[variables('applicationTag')]" + }, + "location": { + "value": "[resourceGroup().location]" + }, + "dataCollectionEndpointName": { + "value": "[variables('dataCollectionEndpointName')]" + } + }, + "template": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "_generator": { + "name": "bicep", + "version": "0.39.26.7824", + "templateHash": "3504576669584336134" + } + }, + "parameters": { + "location": { + "type": "string" + }, + "dataCollectionEndpointName": { + "type": "string" + }, + "vendorTag": { + "type": "string" + }, + "applicationTag": { + "type": "string" + } + }, + "resources": [ + { + "type": "Microsoft.Insights/dataCollectionEndpoints", + "apiVersion": "2023-03-11", + "name": "[parameters('dataCollectionEndpointName')]", + "location": "[parameters('location')]", + "tags": { + "vendor": "[parameters('vendorTag')]", + "application": "[parameters('applicationTag')]" + }, + "properties": { + "networkAcls": { + "publicNetworkAccess": "Enabled" + } + } + } + ], + "outputs": { + "id": { + "type": "string", + "value": "[resourceId('Microsoft.Insights/dataCollectionEndpoints', parameters('dataCollectionEndpointName'))]" + }, + "endpoint": { + "type": "string", + "value": "[reference(resourceId('Microsoft.Insights/dataCollectionEndpoints', parameters('dataCollectionEndpointName')), '2023-03-11').logsIngestion.endpoint]" + } + } + } + }, + "dependsOn": [ + "[resourceId('Microsoft.Resources/deployments', 'table-deployment')]" + ] + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2025-04-01", + "name": "dcr-deployment", + "properties": { + "expressionEvaluationOptions": { + "scope": "inner" + }, + "mode": "Incremental", + "parameters": { + "vendorTag": { + "value": "[variables('vendorTag')]" + }, + "applicationTag": { + "value": "[variables('applicationTag')]" + }, + "location": { + 
"value": "[resourceGroup().location]" + }, + "logAnalyticsWorkspaceId": { + "value": "[resourceId('Microsoft.OperationalInsights/workspaces', parameters('logAnalyticsWorkspaceName'))]" + }, + "dataCollectionEndpointId": { + "value": "[reference(resourceId('Microsoft.Resources/deployments', 'dce-deployment'), '2025-04-01').outputs.id.value]" + }, + "dataCollectionRuleName": { + "value": "[variables('dataCollectionRuleName')]" + }, + "eventsTableName": { + "value": "[variables('eventsTableName')]" + } + }, + "template": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "_generator": { + "name": "bicep", + "version": "0.39.26.7824", + "templateHash": "7691978510549466459" + } + }, + "parameters": { + "location": { + "type": "string" + }, + "dataCollectionEndpointId": { + "type": "string" + }, + "vendorTag": { + "type": "string" + }, + "applicationTag": { + "type": "string" + }, + "eventsTableName": { + "type": "string" + }, + "dataCollectionRuleName": { + "type": "string" + }, + "logAnalyticsWorkspaceId": { + "type": "string" + } + }, + "resources": [ + { + "type": "Microsoft.Insights/dataCollectionRules", + "apiVersion": "2023-03-11", + "name": "[parameters('dataCollectionRuleName')]", + "location": "[parameters('location')]", + "tags": { + "vendor": "[parameters('vendorTag')]", + "application": "[parameters('applicationTag')]" + }, + "identity": { + "type": "SystemAssigned" + }, + "kind": "Direct", + "properties": { + "dataCollectionEndpointId": "[parameters('dataCollectionEndpointId')]", + "dataFlows": [ + { + "streams": [ + "[format('Custom-{0}', parameters('eventsTableName'))]" + ], + "destinations": [ + "default" + ], + "transformKql": " source\n | extend TimeGenerated = now()\n | extend start_time = todatetime(start_time)\n | extend end_time = todatetime(end_time)\n | extend company_id = tostring(companyId)\n | extend module = tostring(module)\n | extend data = 
todynamic(data)\n | project module, company_id, data, start_time, end_time, TimeGenerated\n ", + "outputStream": "[format('Custom-{0}', parameters('eventsTableName'))]" + } + ], + "streamDeclarations": { + "[format('Custom-{0}', parameters('eventsTableName'))]": { + "columns": [ + { + "name": "start_time", + "type": "string" + }, + { + "name": "end_time", + "type": "string" + }, + { + "name": "module", + "type": "string" + }, + { + "name": "companyId", + "type": "string" + }, + { + "name": "data", + "type": "dynamic" + } + ] + } + }, + "dataSources": {}, + "destinations": { + "logAnalytics": [ + { + "workspaceResourceId": "[parameters('logAnalyticsWorkspaceId')]", + "name": "default" + } + ] + } + } + } + ], + "outputs": { + "immutableId": { + "type": "string", + "value": "[reference(resourceId('Microsoft.Insights/dataCollectionRules', parameters('dataCollectionRuleName')), '2023-03-11').immutableId]" + } + } + } + }, + "dependsOn": [ + "[resourceId('Microsoft.Resources/deployments', 'dce-deployment')]" + ] + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2025-04-01", + "name": "role-assignments-deployment", + "properties": { + "expressionEvaluationOptions": { + "scope": "inner" + }, + "mode": "Incremental", + "parameters": { + "logAnalyticsWorkspaceName": { + "value": "[parameters('logAnalyticsWorkspaceName')]" + }, + "dataCollectionRuleName": { + "value": "[variables('dataCollectionRuleName')]" + }, + "eventsTableName": { + "value": "[variables('eventsTableName')]" + }, + "entraAppObjectId": { + "value": "[parameters('entraAppObjectId')]" + } + }, + "template": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "_generator": { + "name": "bicep", + "version": "0.39.26.7824", + "templateHash": "17134287068127174284" + } + }, + "parameters": { + "dataCollectionRuleName": { + "type": "string" + }, + "eventsTableName": { + "type": "string" + }, + 
"logAnalyticsWorkspaceName": { + "type": "string" + }, + "entraAppObjectId": { + "type": "string" + } + }, + "variables": { + "monitoringMetricsPublisherRoleId": "[subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '3913510d-42f4-4e42-8a64-420c390055eb')]" + }, + "resources": [ + { + "type": "Microsoft.Authorization/roleAssignments", + "apiVersion": "2022-04-01", + "scope": "[format('Microsoft.Insights/dataCollectionRules/{0}', parameters('dataCollectionRuleName'))]", + "name": "[guid(subscription().subscriptionId, resourceGroup().id, parameters('entraAppObjectId'), parameters('dataCollectionRuleName'), 'MonitoringMetricsPublisher')]", + "properties": { + "roleDefinitionId": "[variables('monitoringMetricsPublisherRoleId')]", + "principalId": "[parameters('entraAppObjectId')]", + "principalType": "ServicePrincipal" + } + }, + { + "type": "Microsoft.Authorization/roleAssignments", + "apiVersion": "2022-04-01", + "scope": "[format('Microsoft.OperationalInsights/workspaces/{0}/tables/{1}', parameters('logAnalyticsWorkspaceName'), parameters('eventsTableName'))]", + "name": "[guid(subscription().subscriptionId, resourceGroup().id, parameters('dataCollectionRuleName'), parameters('eventsTableName'), 'monitoring-metrics-publisher')]", + "properties": { + "roleDefinitionId": "[variables('monitoringMetricsPublisherRoleId')]", + "principalId": "[reference(resourceId('Microsoft.Insights/dataCollectionRules', parameters('dataCollectionRuleName')), '2023-03-11', 'full').identity.principalId]", + "principalType": "ServicePrincipal" + } + } + ] + } + }, + "dependsOn": [ + "[resourceId('Microsoft.Resources/deployments', 'dcr-deployment')]", + "[resourceId('Microsoft.Resources/deployments', 'table-deployment')]" + ] + } + ], + "outputs": { + "dcrId": { + "type": "string", + "value": "[reference(resourceId('Microsoft.Resources/deployments', 'dcr-deployment'), '2025-04-01').outputs.immutableId.value]" + }, + "dce": { + "type": "string", + "value": 
"[reference(resourceId('Microsoft.Resources/deployments', 'dce-deployment'), '2025-04-01').outputs.endpoint.value]" + } + } +} \ No newline at end of file diff --git a/Solutions/GravityZone/Data/Solution_GravityZone.json b/Solutions/GravityZone/Data/Solution_GravityZone.json new file mode 100644 index 00000000000..e94b3cc5ce3 --- /dev/null +++ b/Solutions/GravityZone/Data/Solution_GravityZone.json @@ -0,0 +1,22 @@ +{ + "Name": "GravityZone", + "Author": "Bitdefender SRL", + "Logo": "", + "Description": "The **Bitdefender GravityZone Solution for Microsoft Sentinel** connects your GravityZone environment to Microsoft Sentinel through the **Event Push Service API**. Once deployed, it streams all GravityZone event types into Sentinel, storing them as logs and automatically generating incidents for selected categories.\n\nTo deploy, complete the fields below and select **Review + create**:\n - **Subscription**: The Azure subscription where you want to deploy the solution.\n - **Resource group**: Select an existing resource group or create a new one to organize your Sentinel resources.\n - **Workspace**: The Microsoft Sentinel workspace that will receive GravityZone data. The workspace needs to be onboarded into Microsoft Sentinel beforehand.\n\nAfter deployment, follow the setup steps in this KB article to complete the configuration and start event ingestion.", + "WorkbookBladeDescription": "This Microsoft Sentinel Solution installs workbooks. Workbooks provide a flexible canvas for data monitoring, analysis, and creating rich visual reports within the Azure portal. 
They allow you to combine one or more data sources from Microsoft Sentinel into unified interactive experience.", + "Data Connectors": [ + "Data Connectors/GravityZone_API.json" + ], + "Workbooks": [ + ], + "Parsers": [ + ], + "Analytic Rules": [ + "Analytic Rules/Incidents.yaml" + ], + "BasePath": "https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/GravityZone/", + "Version": "3.0.0", + "Metadata": "SolutionMetadata.json", + "TemplateSpec": true, + "Is1PConnector": false +} diff --git a/Solutions/GravityZone/Package/3.0.0.zip b/Solutions/GravityZone/Package/3.0.0.zip new file mode 100644 index 00000000000..84ec8b36d5a Binary files /dev/null and b/Solutions/GravityZone/Package/3.0.0.zip differ diff --git a/Solutions/GravityZone/Package/createUiDefinition.json b/Solutions/GravityZone/Package/createUiDefinition.json new file mode 100644 index 00000000000..b9809c7cbd8 --- /dev/null +++ b/Solutions/GravityZone/Package/createUiDefinition.json @@ -0,0 +1,127 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/0.1.2-preview/CreateUIDefinition.MultiVm.json#", + "handler": "Microsoft.Azure.CreateUIDef", + "version": "0.1.2-preview", + "parameters": { + "config": { + "isWizard": false, + "basics": { + "description": "\n\n**Note:** Please refer to the following before installing the solution: \n\n• Review the solution [Release Notes](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/GravityZone/ReleaseNotes.md)\n\n • There may be [known issues](https://aka.ms/sentinelsolutionsknownissues) pertaining to this Solution, please refer to them before installing.\n\nThe **Bitdefender GravityZone Solution for Microsoft Sentinel** connects your GravityZone environment to Microsoft Sentinel through the **Event Push Service API**. 
Once deployed, it streams all GravityZone event types into Sentinel, storing them as logs and automatically generating incidents for selected categories.\n\nTo deploy, complete the fields below and select **Review + create**:\n - **Subscription**: The Azure subscription where you want to deploy the solution.\n - **Resource group**: Select an existing resource group or create a new one to organize your Sentinel resources.\n - **Workspace**: The Microsoft Sentinel workspace that will receive GravityZone data. The workspace needs to be onboarded into Microsoft Sentinel beforehand.\n\nAfter deployment, follow the setup steps in this KB article to complete the configuration and start event ingestion.\n\n**Data Connectors:** 1, **Analytic Rules:** 1\n\n[Learn more about Microsoft Sentinel](https://aka.ms/azuresentinel) | [Learn more about Solutions](https://aka.ms/azuresentinelsolutionsdoc)", + "subscription": { + "resourceProviders": [ + "Microsoft.OperationsManagement/solutions", + "Microsoft.OperationalInsights/workspaces/providers/alertRules", + "Microsoft.Insights/workbooks", + "Microsoft.Logic/workflows" + ] + }, + "location": { + "metadata": { + "hidden": "Hiding location, we get it from the log analytics workspace" + }, + "visible": false + }, + "resourceGroup": { + "allowExisting": true + } + } + }, + "basics": [ + { + "name": "getLAWorkspace", + "type": "Microsoft.Solutions.ArmApiControl", + "toolTip": "This filters by workspaces that exist in the Resource Group selected", + "condition": "[greater(length(resourceGroup().name),0)]", + "request": { + "method": "GET", + "path": "[concat(subscription().id,'/providers/Microsoft.OperationalInsights/workspaces?api-version=2020-08-01')]" + } + }, + { + "name": "workspace", + "type": "Microsoft.Common.DropDown", + "label": "Workspace", + "placeholder": "Select a workspace", + "toolTip": "This dropdown will list only workspace that exists in the Resource Group selected", + "constraints": { + "allowedValues": 
"[map(filter(basics('getLAWorkspace').value, (filter) => contains(toLower(filter.id), toLower(resourceGroup().name))), (item) => parse(concat('{\"label\":\"', item.name, '\",\"value\":\"', item.name, '\"}')))]", + "required": true + }, + "visible": true + } + ], + "steps": [ + { + "name": "dataconnectors", + "label": "Data Connectors", + "bladeTitle": "Data Connectors", + "elements": [ + { + "name": "dataconnectors1-text", + "type": "Microsoft.Common.TextBlock", + "options": { + "text": "This Solution installs the data connector for GravityZone. You can get GravityZone custom log data in your Microsoft Sentinel workspace. After installing the solution, configure and enable this data connector by following guidance in Manage solution view." + } + }, + { + "name": "dataconnectors-link1", + "type": "Microsoft.Common.TextBlock", + "options": { + "link": { + "label": "Learn more about connecting data sources", + "uri": "https://docs.microsoft.com/azure/sentinel/connect-data-sources" + } + } + } + ] + }, + { + "name": "analytics", + "label": "Analytics", + "subLabel": { + "preValidation": "Configure the analytics", + "postValidation": "Done" + }, + "bladeTitle": "Analytics", + "elements": [ + { + "name": "analytics-text", + "type": "Microsoft.Common.TextBlock", + "options": { + "text": "This solution installs the following analytic rule templates. After installing the solution, create and enable analytic rules in Manage solution view." 
+ } + }, + { + "name": "analytics-link", + "type": "Microsoft.Common.TextBlock", + "options": { + "link": { + "label": "Learn more", + "uri": "https://docs.microsoft.com/azure/sentinel/tutorial-detect-threats-custom?WT.mc_id=Portal-Microsoft_Azure_CreateUIDef" + } + } + }, + { + "name": "analytic1", + "type": "Microsoft.Common.Section", + "label": "NRT GravityZone Incident Alerts", + "elements": [ + { + "name": "analytic1-text", + "type": "Microsoft.Common.TextBlock", + "options": { + "text": "The query identifies incident-level events received from the GravityZone Data Connector" + } + } + ] + } + ] + } + ], + "outputs": { + "workspace-location": "[first(map(filter(basics('getLAWorkspace').value, (filter) => and(contains(toLower(filter.id), toLower(resourceGroup().name)),equals(filter.name,basics('workspace')))), (item) => item.location))]", + "location": "[location()]", + "workspace": "[basics('workspace')]" + } + } +} diff --git a/Solutions/GravityZone/Package/mainTemplate.json b/Solutions/GravityZone/Package/mainTemplate.json new file mode 100644 index 00000000000..47f126306b4 --- /dev/null +++ b/Solutions/GravityZone/Package/mainTemplate.json @@ -0,0 +1,528 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "author": "Bitdefender SRL", + "comments": "Solution template for GravityZone" + }, + "parameters": { + "location": { + "type": "string", + "minLength": 1, + "defaultValue": "[resourceGroup().location]", + "metadata": { + "description": "Not used, but needed to pass arm-ttk test `Location-Should-Not-Be-Hardcoded`. 
We instead use the `workspace-location` which is derived from the LA workspace" + } + }, + "workspace-location": { + "type": "string", + "defaultValue": "", + "metadata": { + "description": "[concat('Region to deploy solution resources -- separate from location selection',parameters('location'))]" + } + }, + "workspace": { + "defaultValue": "", + "type": "string", + "metadata": { + "description": "Workspace name for Log Analytics where Microsoft Sentinel is setup" + } + } + }, + "variables": { + "_solutionName": "GravityZone", + "_solutionVersion": "3.0.0", + "solutionId": "bitdefendersrl1662990682574.gbarbieru_test_offer_id", + "_solutionId": "[variables('solutionId')]", + "uiConfigId1": "GravityZoneDataConnector", + "_uiConfigId1": "[variables('uiConfigId1')]", + "dataConnectorContentId1": "GravityZoneDataConnector", + "_dataConnectorContentId1": "[variables('dataConnectorContentId1')]", + "dataConnectorId1": "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/dataConnectors', variables('_dataConnectorContentId1'))]", + "_dataConnectorId1": "[variables('dataConnectorId1')]", + "dataConnectorTemplateSpecName1": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-dc-',uniquestring(variables('_dataConnectorContentId1'))))]", + "dataConnectorVersion1": "1.0.0", + "_dataConnectorcontentProductId1": "[concat(take(variables('_solutionId'),50),'-','dc','-', uniqueString(concat(variables('_solutionId'),'-','DataConnector','-',variables('_dataConnectorContentId1'),'-', variables('dataConnectorVersion1'))))]", + "analyticRuleObject1": { + "analyticRuleVersion1": "1.0.0", + "_analyticRulecontentId1": "73c803aa-1188-45dd-8379-62a3319d3d9f", + "analyticRuleId1": "[resourceId('Microsoft.SecurityInsights/AlertRuleTemplates', '73c803aa-1188-45dd-8379-62a3319d3d9f')]", + "analyticRuleTemplateSpecName1": 
"[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-ar-',uniquestring('73c803aa-1188-45dd-8379-62a3319d3d9f')))]", + "_analyticRulecontentProductId1": "[concat(take(variables('_solutionId'),50),'-','ar','-', uniqueString(concat(variables('_solutionId'),'-','AnalyticsRule','-','73c803aa-1188-45dd-8379-62a3319d3d9f','-', '1.0.0')))]" + }, + "_solutioncontentProductId": "[concat(take(variables('_solutionId'),50),'-','sl','-', uniqueString(concat(variables('_solutionId'),'-','Solution','-',variables('_solutionId'),'-', variables('_solutionVersion'))))]" + }, + "resources": [ + { + "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", + "apiVersion": "2023-04-01-preview", + "name": "[variables('dataConnectorTemplateSpecName1')]", + "location": "[parameters('workspace-location')]", + "dependsOn": [ + "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" + ], + "properties": { + "description": "GravityZone data connector with template version 3.0.0", + "mainTemplate": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "[variables('dataConnectorVersion1')]", + "parameters": {}, + "variables": {}, + "resources": [ + { + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',variables('_dataConnectorContentId1'))]", + "apiVersion": "2021-03-01-preview", + "type": "Microsoft.OperationalInsights/workspaces/providers/dataConnectors", + "location": "[parameters('workspace-location')]", + "kind": "GenericUI", + "properties": { + "connectorUiConfig": { + "id": "[variables('_uiConfigId1')]", + "title": "GravityZone Data Connector", + "publisher": "Bitdefender", + "descriptionMarkdown": "This connector enables integration between **Bitdefender GravityZone** and **Microsoft Sentinel** through the **Event Push 
Service API**. Once configured, it streams all GravityZone event types directly into your Sentinel workspace, where they are stored as logs in the `GzSecurityEvents_CL` table.\n\nKey event categories such as **EDR, XDR, ransomware mitigation, network sandboxing, and Exchange malware events** can be automatically correlated and generate incidents through the **NRT GravityZone Incident Alerts** analytics rule.", + "graphQueries": [ + { + "metricName": "Total events received", + "legend": "Events", + "baseQuery": "GzSecurityEvents_CL" + } + ], + "sampleQueries": [ + { + "description": "Get Sample Events", + "query": "GzSecurityEvents_CL\n | take 10" + } + ], + "dataTypes": [ + { + "name": "GzSecurityEvents_CL", + "lastDataReceivedQuery": "GzSecurityEvents_CL\n | summarize Time = max(TimeGenerated)" + } + ], + "connectivityCriterias": [ + { + "type": "IsConnectedQuery", + "value": [ + "GzSecurityEvents_CL\n | summarize LastLogReceived = max(TimeGenerated)\n | project IsConnected = LastLogReceived > ago(7d)" + ] + } + ], + "availability": { + "status": 1, + "isPreview": false + }, + "permissions": { + "resourceProvider": [ + { + "provider": "Microsoft.OperationalInsights/workspaces", + "permissionsDisplayText": "read and write permissions on the workspace are required.", + "providerDisplayName": "Workspace", + "scope": "Workspace", + "requiredPermissions": { + "write": true, + "read": true, + "delete": true + } + }, + { + "provider": "Microsoft.Insights/DataCollectionRules", + "permissionsDisplayText": "read and write permissions to create data collection rules are required.", + "providerDisplayName": "Data Collection Rule", + "scope": "ResourceGroup", + "requiredPermissions": { + "Write": true, + "Read": true, + "Delete": false + } + }, + { + "provider": "Microsoft.Insights/DataCollectionEndpoints", + "permissionsDisplayText": "read and write permissions to create data collection endpoints are required.", + "providerDisplayName": "Data Collection Endpoint", + "scope": 
"ResourceGroup", + "requiredPermissions": { + "Write": true, + "Read": true, + "Delete": false + } + } + ], + "customs": [ + { + "name": "Azure App Registration", + "description": "Microsoft Entra App Registration with the following details retained Directory (Tenant) ID, Application (Client) ID, Managed Service Principal Object ID (from the Enterprise Applications entry of the app), Client Secret (generated under Certificates & secrets)." + }, + { + "name": "GravityZone Cloud Account", + "description": "A GravityZone Cloud account with a generated API key for the Event Push Service endpoint." + }, + { + "name": "Read our guide", + "description": "Follow this step-by-step article to set up the integration. [Customers](https://www.bitdefender.com/business/support/en/77209-1455218-integrate-gravityzone-with-azure-sentinel.html) | [Partners](https://www.bitdefender.com/business/support/en/77211-1455218-integrate-gravityzone-with-azure-sentinel.html)" + } + ] + }, + "instructionSteps": [ + { + "description": "1. Click the **Deploy to Azure** button below and fill in the required parameters. \n\n\t\n\n[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-gravityzone-azuredeploy)\n\n2. Collect the **Logs Ingestion** URL from `gz-sentinel-dce` [Data Collection Endpoint](https://portal.azure.com/#view/HubsExtension/BrowseResource.ReactView/resourceType/microsoft.insights%2Fdatacollectionendpoints)\n\n3. Collect the **Immutable ID** from `gz-sentinel-dcr` [Data Collection Rule](https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules)\n\n4. Go to your GravityZone Cloud account and navigate to **My Account**. Create an API key with **Event Push Service** permissions.\n\n5. Configure your **Event Push Service** settings using this article. 
[Customers](https://www.bitdefender.com/business/support/en/77209-1455218-integrate-gravityzone-with-azure-sentinel.html#UUID-5e8bbfa1-7892[%E2%80%A6]-2427-abd6f930e8c2) | [Partners](https://www.bitdefender.com/business/support/en/77211-1455218-integrate-gravityzone-with-azure-sentinel.html#UUID-5e8bbfa1-7892[%E2%80%A6]-2427-abd6f930e8c2).\n\n**Please note that after the successful deployment of the Data Connector & successful setup of GravityZone's Event Push Service, the system will receive Activity Log data in near-real-time. A short delay may occur between data transmission and its appearance in the Microsoft Sentinel Logs section.**" + } + ] + } + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2023-04-01-preview", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('DataConnector-', last(split(variables('_dataConnectorId1'),'/'))))]", + "properties": { + "parentId": "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/dataConnectors', variables('_dataConnectorContentId1'))]", + "contentId": "[variables('_dataConnectorContentId1')]", + "kind": "DataConnector", + "version": "[variables('dataConnectorVersion1')]", + "source": { + "kind": "Solution", + "name": "GravityZone", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Bitdefender SRL" + }, + "support": { + "name": "Bitdefender SRL", + "email": "support@bitdefender.com", + "tier": "Partner", + "link": "https://bitdefender.com" + } + } + } + ] + }, + "packageKind": "Solution", + "packageVersion": "[variables('_solutionVersion')]", + "packageName": "[variables('_solutionName')]", + "packageId": "[variables('_solutionId')]", + "contentSchemaVersion": "3.0.0", + "contentId": "[variables('_dataConnectorContentId1')]", + "contentKind": "DataConnector", + "displayName": "GravityZone Data Connector", + "contentProductId": 
"[variables('_dataConnectorcontentProductId1')]", + "id": "[variables('_dataConnectorcontentProductId1')]", + "version": "[variables('dataConnectorVersion1')]" + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2023-04-01-preview", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('DataConnector-', last(split(variables('_dataConnectorId1'),'/'))))]", + "dependsOn": [ + "[variables('_dataConnectorId1')]" + ], + "location": "[parameters('workspace-location')]", + "properties": { + "parentId": "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/dataConnectors', variables('_dataConnectorContentId1'))]", + "contentId": "[variables('_dataConnectorContentId1')]", + "kind": "DataConnector", + "version": "[variables('dataConnectorVersion1')]", + "source": { + "kind": "Solution", + "name": "GravityZone", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Bitdefender SRL" + }, + "support": { + "name": "Bitdefender SRL", + "email": "support@bitdefender.com", + "tier": "Partner", + "link": "https://bitdefender.com" + } + } + }, + { + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',variables('_dataConnectorContentId1'))]", + "apiVersion": "2021-03-01-preview", + "type": "Microsoft.OperationalInsights/workspaces/providers/dataConnectors", + "location": "[parameters('workspace-location')]", + "kind": "GenericUI", + "properties": { + "connectorUiConfig": { + "title": "GravityZone Data Connector", + "publisher": "Bitdefender", + "descriptionMarkdown": "This connector enables integration between **Bitdefender GravityZone** and **Microsoft Sentinel** through the **Event Push Service API**. 
Once configured, it streams all GravityZone event types directly into your Sentinel workspace, where they are stored as logs in the `GzSecurityEvents_CL` table.\n\nKey event categories such as **EDR, XDR, ransomware mitigation, network sandboxing, and Exchange malware events** can be automatically correlated and generate incidents through the **NRT GravityZone Incident Alerts** analytics rule.", + "graphQueries": [ + { + "metricName": "Total events received", + "legend": "Events", + "baseQuery": "GzSecurityEvents_CL" + } + ], + "dataTypes": [ + { + "name": "GzSecurityEvents_CL", + "lastDataReceivedQuery": "GzSecurityEvents_CL\n | summarize Time = max(TimeGenerated)" + } + ], + "connectivityCriterias": [ + { + "type": "IsConnectedQuery", + "value": [ + "GzSecurityEvents_CL\n | summarize LastLogReceived = max(TimeGenerated)\n | project IsConnected = LastLogReceived > ago(7d)" + ] + } + ], + "sampleQueries": [ + { + "description": "Get Sample Events", + "query": "GzSecurityEvents_CL\n | take 10" + } + ], + "availability": { + "status": 1, + "isPreview": false + }, + "permissions": { + "resourceProvider": [ + { + "provider": "Microsoft.OperationalInsights/workspaces", + "permissionsDisplayText": "read and write permissions on the workspace are required.", + "providerDisplayName": "Workspace", + "scope": "Workspace", + "requiredPermissions": { + "write": true, + "read": true, + "delete": true + } + }, + { + "provider": "Microsoft.Insights/DataCollectionRules", + "permissionsDisplayText": "read and write permissions to create data collection rules are required.", + "providerDisplayName": "Data Collection Rule", + "scope": "ResourceGroup", + "requiredPermissions": { + "Write": true, + "Read": true, + "Delete": false + } + }, + { + "provider": "Microsoft.Insights/DataCollectionEndpoints", + "permissionsDisplayText": "read and write permissions to create data collection endpoints are required.", + "providerDisplayName": "Data Collection Endpoint", + "scope": 
"ResourceGroup", + "requiredPermissions": { + "Write": true, + "Read": true, + "Delete": false + } + } + ], + "customs": [ + { + "name": "Azure App Registration", + "description": "Microsoft Entra App Registration with the following details retained Directory (Tenant) ID, Application (Client) ID, Managed Service Principal Object ID (from the Enterprise Applications entry of the app), Client Secret (generated under Certificates & secrets)." + }, + { + "name": "GravityZone Cloud Account", + "description": "A GravityZone Cloud account with a generated API key for the Event Push Service endpoint." + }, + { + "name": "Read our guide", + "description": "Follow this step-by-step article to set up the integration. [Customers](https://www.bitdefender.com/business/support/en/77209-1455218-integrate-gravityzone-with-azure-sentinel.html) | [Partners](https://www.bitdefender.com/business/support/en/77211-1455218-integrate-gravityzone-with-azure-sentinel.html)" + } + ] + }, + "instructionSteps": [ + { + "description": "1. Click the **Deploy to Azure** button below and fill in the required parameters. \n\n\t\n\n[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-gravityzone-azuredeploy)\n\n2. Collect the **Logs Ingestion** URL from `gz-sentinel-dce` [Data Collection Endpoint](https://portal.azure.com/#view/HubsExtension/BrowseResource.ReactView/resourceType/microsoft.insights%2Fdatacollectionendpoints)\n\n3. Collect the **Immutable ID** from `gz-sentinel-dcr` [Data Collection Rule](https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules)\n\n4. Go to your GravityZone Cloud account and navigate to **My Account**. Create an API key with **Event Push Service** permissions.\n\n5. Configure your **Event Push Service** settings using this article. 
[Customers](https://www.bitdefender.com/business/support/en/77209-1455218-integrate-gravityzone-with-azure-sentinel.html#UUID-5e8bbfa1-7892[%E2%80%A6]-2427-abd6f930e8c2) | [Partners](https://www.bitdefender.com/business/support/en/77211-1455218-integrate-gravityzone-with-azure-sentinel.html#UUID-5e8bbfa1-7892[%E2%80%A6]-2427-abd6f930e8c2).\n\n**Please note that after the successful deployment of the Data Connector & successful setup of GravityZone's Event Push Service, the system will receive Activity Log data in near-real-time. A short delay may occur between data transmission and its appearance in the Microsoft Sentinel Logs section.**" + } + ], + "id": "[variables('_uiConfigId1')]" + } + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", + "apiVersion": "2023-04-01-preview", + "name": "[variables('analyticRuleObject1').analyticRuleTemplateSpecName1]", + "location": "[parameters('workspace-location')]", + "dependsOn": [ + "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" + ], + "properties": { + "description": "Incidents_AnalyticalRules Analytics Rule with template version 3.0.0", + "mainTemplate": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "[variables('analyticRuleObject1').analyticRuleVersion1]", + "parameters": {}, + "variables": {}, + "resources": [ + { + "type": "Microsoft.SecurityInsights/AlertRuleTemplates", + "name": "[variables('analyticRuleObject1')._analyticRulecontentId1]", + "apiVersion": "2023-02-01-preview", + "kind": "NRT", + "location": "[parameters('workspace-location')]", + "properties": { + "description": "The query identifies incident-level events received from the GravityZone Data Connector", + "displayName": "NRT GravityZone Incident Alerts", + "enabled": false, + "query": 
"ASimAlertEventBitdefenderGravityZone\n | extend IncidentType = case(\n AdditionalFields.Module == \"new-incident\", \"EDR Incident\",\n AdditionalFields.Module == \"new-extended-incident\", \"XDR Incident\",\n AdditionalFields.Module == \"ransomware-mitigation\", \"Ransomware Mitigation\",\n AdditionalFields.Module == \"network-sandboxing\", \"Sandbox Analyzer Detection\",\n AdditionalFields.Module == \"exchange-malware\", \"Exchange Malware Detection\",\n \"Incident\" // fallback value if null or unmatched\n ),\n Tactics = AdditionalFields.AttackTypes\n | project EventUid, EventSeverity, EventStartTime, IncidentType, Tactics, EventVendor, EventProduct, DvcId, DvcIpAddr, DvcHostname, DvcAction, DvcFQDN\n", + "severity": "Medium", + "suppressionDuration": "PT1H", + "suppressionEnabled": false, + "status": "Available", + "requiredDataConnectors": [ + { + "connectorId": "GravityZoneDataConnector", + "dataTypes": [ + "ASimAlertEventBitdefenderGravityZone" + ] + } + ], + "entityMappings": [ + { + "entityType": "Host", + "fieldMappings": [ + { + "columnName": "DvcHostname", + "identifier": "HostName" + } + ] + }, + { + "entityType": "IP", + "fieldMappings": [ + { + "columnName": "DvcIpAddr", + "identifier": "Address" + } + ] + } + ], + "eventGroupingSettings": { + "aggregationKind": "AlertPerResult" + }, + "alertDetailsOverride": { + "alertDescriptionFormat": "Alert generated on {{EventStartTime}} in Bitdefender GravityZone.\\n\\nGravityZone Incident ID / Alert GUID: {{EventUid}}\\n\\nPlease check the source for more information and investigate further.\n", + "alertDisplayNameFormat": "GravityZone: {{IncidentType}}", + "alertSeverityColumnName": "EventSeverity", + "alertTacticsColumnName": "Tactics", + "alertDynamicProperties": [ + { + "value": "EventProduct", + "alertProperty": "ProductName" + }, + { + "value": "EventVendor", + "alertProperty": "ProviderName" + } + ] + } + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + 
"apiVersion": "2022-01-01-preview", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('AnalyticsRule-', last(split(variables('analyticRuleObject1').analyticRuleId1,'/'))))]", + "properties": { + "description": "GravityZone Analytics Rule 1", + "parentId": "[variables('analyticRuleObject1').analyticRuleId1]", + "contentId": "[variables('analyticRuleObject1')._analyticRulecontentId1]", + "kind": "AnalyticsRule", + "version": "[variables('analyticRuleObject1').analyticRuleVersion1]", + "source": { + "kind": "Solution", + "name": "GravityZone", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Bitdefender SRL" + }, + "support": { + "name": "Bitdefender SRL", + "email": "support@bitdefender.com", + "tier": "Partner", + "link": "https://bitdefender.com" + } + } + } + ] + }, + "packageKind": "Solution", + "packageVersion": "[variables('_solutionVersion')]", + "packageName": "[variables('_solutionName')]", + "packageId": "[variables('_solutionId')]", + "contentSchemaVersion": "3.0.0", + "contentId": "[variables('analyticRuleObject1')._analyticRulecontentId1]", + "contentKind": "AnalyticsRule", + "displayName": "NRT GravityZone Incident Alerts", + "contentProductId": "[variables('analyticRuleObject1')._analyticRulecontentProductId1]", + "id": "[variables('analyticRuleObject1')._analyticRulecontentProductId1]", + "version": "[variables('analyticRuleObject1').analyticRuleVersion1]" + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/contentPackages", + "apiVersion": "2023-04-01-preview", + "location": "[parameters('workspace-location')]", + "properties": { + "version": "3.0.0", + "kind": "Solution", + "contentSchemaVersion": "3.0.0", + "displayName": "GravityZone", + "publisherDisplayName": "Bitdefender SRL", + "descriptionHtml": "

Note: Please refer to the following before installing the solution:

\n

• Review the solution Release Notes

\n

• There may be known issues pertaining to this Solution, please refer to them before installing.

\n

The Bitdefender GravityZone Solution for Microsoft Sentinel connects your GravityZone environment to Microsoft Sentinel through the Event Push Service API. Once deployed, it streams all GravityZone event types into Sentinel, storing them as logs and automatically generating incidents for selected categories.

\n

To deploy, complete the fields below and select Review + create:

\n
    \n
  • Subscription: The Azure subscription where you want to deploy the solution.
  • \n
  • Resource group: Select an existing resource group or create a new one to organize your Sentinel resources.
  • \n
  • Workspace: The Microsoft Sentinel workspace that will receive GravityZone data. The workspace needs to be onboarded into Microsoft Sentinel beforehand.
  • \n
\n

After deployment, follow the setup steps in this KB article to complete the configuration and start event ingestion.

\n

Data Connectors: 1, Analytic Rules: 1

\n

Learn more about Microsoft Sentinel | Learn more about Solutions

\n", + "contentKind": "Solution", + "contentProductId": "[variables('_solutioncontentProductId')]", + "id": "[variables('_solutioncontentProductId')]", + "icon": "", + "contentId": "[variables('_solutionId')]", + "parentId": "[variables('_solutionId')]", + "source": { + "kind": "Solution", + "name": "GravityZone", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Bitdefender SRL" + }, + "support": { + "name": "Bitdefender SRL", + "email": "support@bitdefender.com", + "tier": "Partner", + "link": "https://bitdefender.com" + }, + "dependencies": { + "operator": "AND", + "criteria": [ + { + "kind": "DataConnector", + "contentId": "[variables('_dataConnectorContentId1')]", + "version": "[variables('dataConnectorVersion1')]" + }, + { + "kind": "AnalyticsRule", + "contentId": "[variables('analyticRuleObject1')._analyticRulecontentId1]", + "version": "[variables('analyticRuleObject1').analyticRuleVersion1]" + } + ] + }, + "firstPublishDate": "2025-07-28", + "providers": [ + "Bitdefender" + ], + "categories": { + "domains": [ + "Security - Information Protection", + "Security - Cloud Security", + "Security - Threat Protection" + ] + } + }, + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/', variables('_solutionId'))]" + } + ], + "outputs": {} +} diff --git a/Solutions/GravityZone/Package/testParameters.json b/Solutions/GravityZone/Package/testParameters.json new file mode 100644 index 00000000000..e55ec41a9ac --- /dev/null +++ b/Solutions/GravityZone/Package/testParameters.json @@ -0,0 +1,24 @@ +{ + "location": { + "type": "string", + "minLength": 1, + "defaultValue": "[resourceGroup().location]", + "metadata": { + "description": "Not used, but needed to pass arm-ttk test `Location-Should-Not-Be-Hardcoded`. 
We instead use the `workspace-location` which is derived from the LA workspace" + } + }, + "workspace-location": { + "type": "string", + "defaultValue": "", + "metadata": { + "description": "[concat('Region to deploy solution resources -- separate from location selection',parameters('location'))]" + } + }, + "workspace": { + "defaultValue": "", + "type": "string", + "metadata": { + "description": "Workspace name for Log Analytics where Microsoft Sentinel is setup" + } + } +} diff --git a/Solutions/GravityZone/ReleaseNotes.md b/Solutions/GravityZone/ReleaseNotes.md new file mode 100644 index 00000000000..6435fc425f0 --- /dev/null +++ b/Solutions/GravityZone/ReleaseNotes.md @@ -0,0 +1,3 @@ +| **Version** | **Date Modified (DD-MM-YYYY)** | **Change History** | +|-------------|--------------------------------|----------------------------------------------------| +| 3.0.0 | 29-07-2025 | Initial Solution Release | \ No newline at end of file diff --git a/Solutions/GravityZone/SolutionMetadata.json b/Solutions/GravityZone/SolutionMetadata.json new file mode 100644 index 00000000000..0e80c69ea28 --- /dev/null +++ b/Solutions/GravityZone/SolutionMetadata.json @@ -0,0 +1,19 @@ +{ + "publisherId": "bitdefendersrl1662990682574", + "offerId": "gbarbieru_test_offer_id", + "firstPublishDate": "2025-07-28", + "providers": ["Bitdefender"], + "categories": { + "domains": [ + "Security - Information Protection", + "Security - Cloud Security", + "Security - Threat Protection" + ] + }, + "support": { + "name": "Bitdefender SRL", + "email": "support@bitdefender.com", + "tier": "Partner", + "link": "https://bitdefender.com" + } +} diff --git a/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-Automated-Triage/azuredeploy.json b/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-Automated-Triage/azuredeploy.json index a9bd5b305c5..d33932f1ab6 100644 --- a/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-Automated-Triage/azuredeploy.json +++ 
b/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-Automated-Triage/azuredeploy.json @@ -515,7 +515,6 @@ "tags": { "LogicAppsCategory": "security", "Source": "MDTI", - "hidden-SentinelWorkspaceId": "/subscriptions/a2c2c31d-ebd4-4880-a60c-d615efa9d201/resourceGroups/Sentinel-CAT/providers/microsoft.OperationalInsights/Workspaces/sentinel-lab", "hidden-SentinelTemplateName": "MDTI-Automated-Triage", "hidden-SentinelTemplateVersion": "1.0" }, diff --git a/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-Data-WebComponents/azuredeploy.json b/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-Data-WebComponents/azuredeploy.json index bb77c0d93cd..a047756815c 100644 --- a/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-Data-WebComponents/azuredeploy.json +++ b/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-Data-WebComponents/azuredeploy.json @@ -513,7 +513,6 @@ "tags": { "LogicAppsCategory": "security", "Source": "MDTI", - "hidden-SentinelWorkspaceId": "/subscriptions/a2c2c31d-ebd4-4880-a60c-d615efa9d201/resourceGroups/Sentinel-CAT/providers/microsoft.OperationalInsights/Workspaces/sentinel-lab4", "hidden-SentinelTemplateName": "MDTI-Data-WebComponents", "hidden-SentinelTemplateVersion": "1.0" }, diff --git a/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-Intel-Reputation/azuredeploy.json b/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-Intel-Reputation/azuredeploy.json index ecca8322d53..dbf22bc9ed8 100644 --- a/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-Intel-Reputation/azuredeploy.json +++ b/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-Intel-Reputation/azuredeploy.json @@ -367,7 +367,6 @@ "tags": { "LogicAppsCategory": "security", "Source": "MDTI", - "hidden-SentinelWorkspaceId": 
"/subscriptions/a2c2c31d-ebd4-4880-a60c-d615efa9d201/resourceGroups/Sentinel-CAT/providers/microsoft.OperationalInsights/Workspaces/sentinel-lab4", "hidden-SentinelTemplateName": "MDTI-Intel-Reputation", "hidden-SentinelTemplateVersion": "1.0" }, diff --git a/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-PassiveDns/azuredeploy.json b/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-PassiveDns/azuredeploy.json index 2b2cb299c8d..39b707422d6 100644 --- a/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-PassiveDns/azuredeploy.json +++ b/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-PassiveDns/azuredeploy.json @@ -650,7 +650,6 @@ "tags": { "LogicAppsCategory": "security", "Source": "MDTI", - "hidden-SentinelWorkspaceId": "/subscriptions/a2c2c31d-ebd4-4880-a60c-d615efa9d201/resourceGroups/Sentinel-CAT/providers/microsoft.OperationalInsights/Workspaces/sentinel-lab4", "hidden-SentinelTemplateName": "MDTI-Data-PassiveDns", "hidden-SentinelTemplateVersion": "1.0" }, diff --git a/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-PassiveDnsReverse/azuredeploy.json b/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-PassiveDnsReverse/azuredeploy.json index 122cd2e3866..9585a0b0b16 100644 --- a/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-PassiveDnsReverse/azuredeploy.json +++ b/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-PassiveDnsReverse/azuredeploy.json @@ -652,7 +652,6 @@ "tags": { "LogicAppsCategory": "security", "Source": "MDTI", - "hidden-SentinelWorkspaceId": "/subscriptions/a2c2c31d-ebd4-4880-a60c-d615efa9d201/resourceGroups/Sentinel-CAT/providers/microsoft.OperationalInsights/Workspaces/sentinel-lab4", "hidden-SentinelTemplateName": "MDTI-Data-ReverseDnS", "hidden-SentinelTemplateVersion": "1.0" }, diff --git a/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-Trackers/azuredeploy.json b/Solutions/Microsoft Defender Threat 
Intelligence/Playbooks/MDTI-Trackers/azuredeploy.json index dcfcb82c20b..5b50378d1fc 100644 --- a/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-Trackers/azuredeploy.json +++ b/Solutions/Microsoft Defender Threat Intelligence/Playbooks/MDTI-Trackers/azuredeploy.json @@ -628,7 +628,6 @@ "tags": { "LogicAppsCategory": "security", "Source": "MDTI", - "hidden-SentinelWorkspaceId": "/subscriptions/a2c2c31d-ebd4-4880-a60c-d615efa9d201/resourceGroups/Sentinel-CAT/providers/microsoft.OperationalInsights/Workspaces/sentinel-lab4", "hidden-SentinelTemplateName": "MDTI-Data-Trackers", "hidden-SentinelTemplateVersion": "1.0" }, diff --git a/Solutions/ProofPointTap/Package/3.1.1.zip b/Solutions/ProofPointTap/Package/3.1.1.zip index 8601cd69878..53a187c1075 100644 Binary files a/Solutions/ProofPointTap/Package/3.1.1.zip and b/Solutions/ProofPointTap/Package/3.1.1.zip differ diff --git a/Solutions/ProofPointTap/Package/mainTemplate.json b/Solutions/ProofPointTap/Package/mainTemplate.json index 019b5521e7a..5713cc7aaef 100644 --- a/Solutions/ProofPointTap/Package/mainTemplate.json +++ b/Solutions/ProofPointTap/Package/mainTemplate.json @@ -126,7 +126,12 @@ "playbookId4": "[resourceId('Microsoft.Logic/workflows', variables('playbookContentId4'))]", "playbookTemplateSpecName4": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-pl-',uniquestring(variables('_playbookContentId4'))))]", "_playbookcontentProductId4": "[concat(take(variables('_solutionId'),50),'-','pl','-', uniqueString(concat(variables('_solutionId'),'-','Playbook','-',variables('_playbookContentId4'),'-', variables('playbookVersion4'))))]", - "_solutioncontentProductId": "[concat(take(variables('_solutionId'),50),'-','sl','-', uniqueString(concat(variables('_solutionId'),'-','Solution','-',variables('_solutionId'),'-', variables('_solutionVersion'))))]" + "_solutioncontentProductId": "[concat(take(variables('_solutionId'),50),'-','sl','-', 
uniqueString(concat(variables('_solutionId'),'-','Solution','-',variables('_solutionId'),'-', variables('_solutionVersion'))))]", + "operation": "getCampaignById", + "operation1": "getVAP", + "operation2": "getForensics", + "operation3": "getTopClickers", + "operation4": "decodeURL" }, "resources": [ { @@ -2196,16 +2201,6 @@ } }, "variables": { - "operationId-getCampaignById": "getCampaignById", - "_operationId-getCampaignById": "[[variables('operationId-getCampaignById')]", - "operationId-getVAP": "getVAP", - "_operationId-getVAP": "[[variables('operationId-getVAP')]", - "operationId-getForensics": "getForensics", - "_operationId-getForensics": "[[variables('operationId-getForensics')]", - "operationId-getTopClickers": "getTopClickers", - "_operationId-getTopClickers": "[[variables('operationId-getTopClickers')]", - "operationId-decodeURL": "decodeURL", - "_operationId-decodeURL": "[[variables('operationId-decodeURL')]", "workspace-location-inline": "[concat('[resourceGroup().locatio', 'n]')]", "playbookContentId1": "ProofpointTAPConnector", "playbookId1": "[[resourceId('Microsoft.Web/customApis', parameters('customApis_ProofpointTAP_name'))]", @@ -2274,7 +2269,7 @@ "get": { "summary": "Get Campaign by Id", "description": "Fetch detailed information for a given campaign.", - "operationId": "[[variables('_operationId-getCampaignById')]", + "operationId": "[[variables('operation')]", "produces": [ "application/json" ], @@ -2476,7 +2471,7 @@ "get": { "summary": "Get Very Attacked People", "description": "Fetch the identities and attack index breakdown of Very Attacked People within your organization for a given period.", - "operationId": "[[variables('_operationId-getVAP')]", + "operationId": "[[variables('operation1')]", "produces": [ "application/json" ], @@ -2830,7 +2825,7 @@ }, "summary": "Get Forensics", "description": "Fetch forensic information for a given threat or campaign.", - "operationId": "[[variables('_operationId-getForensics')]", + "operationId": 
"[[variables('operation2')]", "parameters": [ { "name": "campaignId", @@ -2966,7 +2961,7 @@ }, "summary": "Get top clickers", "description": "Fetch the identities and attack index of the top clickers within your organization for a given period. Top clickers are the users who have demonstrated a tendency to click on malicious URLs, regardless of whether the clicks were blocked or not. Knowing who are more susceptible to threats is useful for proactive security approaches such as security training assignments.", - "operationId": "[[variables('_operationId-getTopClickers')]", + "operationId": "[[variables('operation3')]", "parameters": [ { "name": "window", @@ -3058,7 +3053,7 @@ }, "summary": "Decode URL", "description": "Decodes one or more URLs. Available fields in results vary, depending on if the request is authenticated.", - "operationId": "[[variables('_operationId-decodeURL')]", + "operationId": "[[variables('operation4')]", "parameters": [ { "name": "Content-Type", diff --git a/Solutions/Proofpoint On demand(POD) Email Security/Data Connectors/ProofPointEmailSecurity_CCP/ProofpointPOD_PollingConfig.json b/Solutions/Proofpoint On demand(POD) Email Security/Data Connectors/ProofPointEmailSecurity_CCP/ProofpointPOD_PollingConfig.json index f3968dd049e..38897e877e6 100644 --- a/Solutions/Proofpoint On demand(POD) Email Security/Data Connectors/ProofPointEmailSecurity_CCP/ProofpointPOD_PollingConfig.json +++ b/Solutions/Proofpoint On demand(POD) Email Security/Data Connectors/ProofPointEmailSecurity_CCP/ProofpointPOD_PollingConfig.json @@ -17,9 +17,6 @@ "request": { "httpMethod": "Get", "apiEndpoint": "wss://logstream.proofpoint.com:443/v1/stream", - "startTimeAttributeName": "sinceTime", - "endTimeAttributeName": "toTime", - "queryTimeFormat": "yyyy-MM-ddTHH:mm-0000", "queryParameters": { "cid": "{{clusterId}}", "type": "message" @@ -27,7 +24,6 @@ "rateLimitQPS": 20, "queryWindowInMin": 5, "retryCount": 2, - "timeoutInSeconds": 180, "logResponseContent": true }, 
"response": { @@ -60,9 +56,6 @@ "request": { "httpMethod": "Get", "apiEndpoint": "wss://logstream.proofpoint.com:443/v1/stream", - "startTimeAttributeName": "sinceTime", - "endTimeAttributeName": "toTime", - "queryTimeFormat": "yyyy-MM-ddTHH:mm-0000", "queryParameters": { "cid": "{{clusterId}}", "type": "maillog" @@ -70,7 +63,6 @@ "rateLimitQPS": 20, "queryWindowInMin": 5, "retryCount": 2, - "timeoutInSeconds": 180, "logResponseContent": true }, "response": { diff --git a/Solutions/Proofpoint On demand(POD) Email Security/Data/Solution_ProofPointPOD.json b/Solutions/Proofpoint On demand(POD) Email Security/Data/Solution_ProofPointPOD.json index aafe4690f5a..51ecca133de 100644 --- a/Solutions/Proofpoint On demand(POD) Email Security/Data/Solution_ProofPointPOD.json +++ b/Solutions/Proofpoint On demand(POD) Email Security/Data/Solution_ProofPointPOD.json @@ -37,7 +37,7 @@ "Data Connectors/ProofPointEmailSecurity_CCP/ProofpointPOD_Definaton.json" ], "BasePath": "C:\\GitHub\\Azure-Sentinel\\solutions\\Proofpoint On demand(POD) Email Security", - "Version": "3.1.1", + "Version": "3.1.2", "Metadata": "SolutionMetadata.json", "TemplateSpec": true, "Is1PConnector": false diff --git a/Solutions/Proofpoint On demand(POD) Email Security/Package/3.1.2.zip b/Solutions/Proofpoint On demand(POD) Email Security/Package/3.1.2.zip new file mode 100644 index 00000000000..7cd55cd05c0 Binary files /dev/null and b/Solutions/Proofpoint On demand(POD) Email Security/Package/3.1.2.zip differ diff --git a/Solutions/Proofpoint On demand(POD) Email Security/Package/mainTemplate.json b/Solutions/Proofpoint On demand(POD) Email Security/Package/mainTemplate.json index 8ec0a65f15d..838a195d541 100644 --- a/Solutions/Proofpoint On demand(POD) Email Security/Package/mainTemplate.json +++ b/Solutions/Proofpoint On demand(POD) Email Security/Package/mainTemplate.json @@ -55,7 +55,7 @@ "email": "azure-support@proofpoint.com", "_email": "[variables('email')]", "_solutionName": "Proofpoint On 
demand(POD) Email Security", - "_solutionVersion": "3.1.1", + "_solutionVersion": "3.1.2", "solutionId": "proofpointinc1600438591120.azure-sentinel-proofpointpod", "_solutionId": "[variables('solutionId')]", "workbookVersion1": "1.0.0", @@ -210,7 +210,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPOD Workbook with template version 3.1.1", + "description": "ProofpointPOD Workbook with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('workbookVersion1')]", @@ -302,7 +302,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPOD Data Parser with template version 3.1.1", + "description": "ProofpointPOD Data Parser with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('parserObject1').parserVersion1]", @@ -434,7 +434,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODHighScoreAdultValue_HuntingQueries Hunting Query with template version 3.1.1", + "description": "ProofpointPODHighScoreAdultValue_HuntingQueries Hunting Query with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject1').huntingQueryVersion1]", @@ -515,7 +515,7 @@ 
"[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODHighScoreMalwareValue_HuntingQueries Hunting Query with template version 3.1.1", + "description": "ProofpointPODHighScoreMalwareValue_HuntingQueries Hunting Query with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject2').huntingQueryVersion2]", @@ -596,7 +596,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODHighScorePhishValue_HuntingQueries Hunting Query with template version 3.1.1", + "description": "ProofpointPODHighScorePhishValue_HuntingQueries Hunting Query with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject3').huntingQueryVersion3]", @@ -677,7 +677,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODHighScoreSpamValue_HuntingQueries Hunting Query with template version 3.1.1", + "description": "ProofpointPODHighScoreSpamValue_HuntingQueries Hunting Query with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject4').huntingQueryVersion4]", @@ -758,7 +758,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 
'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODHighScoreSuspectValue_HuntingQueries Hunting Query with template version 3.1.1", + "description": "ProofpointPODHighScoreSuspectValue_HuntingQueries Hunting Query with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject5').huntingQueryVersion5]", @@ -839,7 +839,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODLargeOutboundEmails_HuntingQueries Hunting Query with template version 3.1.1", + "description": "ProofpointPODLargeOutboundEmails_HuntingQueries Hunting Query with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject6').huntingQueryVersion6]", @@ -920,7 +920,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODRecipientsHighNumberDiscardReject_HuntingQueries Hunting Query with template version 3.1.1", + "description": "ProofpointPODRecipientsHighNumberDiscardReject_HuntingQueries Hunting Query with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject7').huntingQueryVersion7]", @@ -1001,7 +1001,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": 
{ - "description": "ProofpointPODRecipientsLargeNumberOfCorruptedEmails_HuntingQueries Hunting Query with template version 3.1.1", + "description": "ProofpointPODRecipientsLargeNumberOfCorruptedEmails_HuntingQueries Hunting Query with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject8').huntingQueryVersion8]", @@ -1082,7 +1082,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODSendersLargeNumberOfCorruptedEmails_HuntingQueries Hunting Query with template version 3.1.1", + "description": "ProofpointPODSendersLargeNumberOfCorruptedEmails_HuntingQueries Hunting Query with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject9').huntingQueryVersion9]", @@ -1163,7 +1163,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODSuspiciousFileTypesInAttachments_HuntingQueries Hunting Query with template version 3.1.1", + "description": "ProofpointPODSuspiciousFileTypesInAttachments_HuntingQueries Hunting Query with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('huntingQueryObject10').huntingQueryVersion10]", @@ -1244,7 +1244,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": 
"ProofpointPODBinaryInAttachment_AnalyticalRules Analytics Rule with template version 3.1.1", + "description": "ProofpointPODBinaryInAttachment_AnalyticalRules Analytics Rule with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject1').analyticRuleVersion1]", @@ -1286,7 +1286,6 @@ ], "entityMappings": [ { - "entityType": "Account", "fieldMappings": [ { "columnName": "Name", @@ -1296,7 +1295,8 @@ "columnName": "UPNSuffix", "identifier": "UPNSuffix" } - ] + ], + "entityType": "Account" } ] } @@ -1352,7 +1352,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODDataExfiltrationToPrivateEmail_AnalyticalRules Analytics Rule with template version 3.1.1", + "description": "ProofpointPODDataExfiltrationToPrivateEmail_AnalyticalRules Analytics Rule with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject2').analyticRuleVersion2]", @@ -1394,13 +1394,13 @@ ], "entityMappings": [ { - "entityType": "Account", "fieldMappings": [ { "columnName": "AccountCustomEntity", "identifier": "FullName" } - ] + ], + "entityType": "Account" } ] } @@ -1456,7 +1456,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODHighRiskNotDiscarded_AnalyticalRules Analytics Rule with template version 3.1.1", + "description": "ProofpointPODHighRiskNotDiscarded_AnalyticalRules Analytics Rule with template version 3.1.2", "mainTemplate": { "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject3').analyticRuleVersion3]", @@ -1498,13 +1498,13 @@ ], "entityMappings": [ { - "entityType": "Account", "fieldMappings": [ { "columnName": "AccountCustomEntity", "identifier": "FullName" } - ] + ], + "entityType": "Account" } ] } @@ -1560,7 +1560,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODMultipleArchivedAttachmentsToSameRecipient_AnalyticalRules Analytics Rule with template version 3.1.1", + "description": "ProofpointPODMultipleArchivedAttachmentsToSameRecipient_AnalyticalRules Analytics Rule with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject4').analyticRuleVersion4]", @@ -1602,13 +1602,13 @@ ], "entityMappings": [ { - "entityType": "Account", "fieldMappings": [ { "columnName": "AccountCustomEntity", "identifier": "FullName" } - ] + ], + "entityType": "Account" } ] } @@ -1664,7 +1664,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODMultipleLargeEmailsToSameRecipient_AnalyticalRules Analytics Rule with template version 3.1.1", + "description": "ProofpointPODMultipleLargeEmailsToSameRecipient_AnalyticalRules Analytics Rule with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject5').analyticRuleVersion5]", @@ -1706,13 +1706,13 @@ ], "entityMappings": [ { - "entityType": "Account", "fieldMappings": [ { 
"columnName": "AccountCustomEntity", "identifier": "FullName" } - ] + ], + "entityType": "Account" } ] } @@ -1768,7 +1768,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODMultipleProtectedEmailsToUnknownRecipient_AnalyticalRules Analytics Rule with template version 3.1.1", + "description": "ProofpointPODMultipleProtectedEmailsToUnknownRecipient_AnalyticalRules Analytics Rule with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject6').analyticRuleVersion6]", @@ -1810,13 +1810,13 @@ ], "entityMappings": [ { - "entityType": "Account", "fieldMappings": [ { "columnName": "AccountCustomEntity", "identifier": "FullName" } - ] + ], + "entityType": "Account" } ] } @@ -1872,7 +1872,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODSuspiciousAttachment_AnalyticalRules Analytics Rule with template version 3.1.1", + "description": "ProofpointPODSuspiciousAttachment_AnalyticalRules Analytics Rule with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject7').analyticRuleVersion7]", @@ -1914,13 +1914,13 @@ ], "entityMappings": [ { - "entityType": "Account", "fieldMappings": [ { "columnName": "AccountCustomEntity", "identifier": "FullName" } - ] + ], + "entityType": "Account" } ] } @@ -1976,7 +1976,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', 
variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODWeakCiphers_AnalyticalRules Analytics Rule with template version 3.1.1", + "description": "ProofpointPODWeakCiphers_AnalyticalRules Analytics Rule with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject8').analyticRuleVersion8]", @@ -2018,13 +2018,13 @@ ], "entityMappings": [ { - "entityType": "IP", "fieldMappings": [ { "columnName": "IPCustomEntity", "identifier": "Address" } - ] + ], + "entityType": "IP" } ] } @@ -2080,7 +2080,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODEmailSenderInTIList_AnalyticalRules Analytics Rule with template version 3.1.1", + "description": "ProofpointPODEmailSenderInTIList_AnalyticalRules Analytics Rule with template version 3.1.2", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject9').analyticRuleVersion9]", @@ -2136,13 +2136,13 @@ ], "entityMappings": [ { - "entityType": "Account", "fieldMappings": [ { "columnName": "ClientEmail", "identifier": "FullName" } - ] + ], + "entityType": "Account" } ] } @@ -2198,7 +2198,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "ProofpointPODEmailSenderIPinTIList_AnalyticalRules Analytics Rule with template version 3.1.1", + "description": "ProofpointPODEmailSenderIPinTIList_AnalyticalRules Analytics Rule with template version 3.1.2", "mainTemplate": { "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('analyticRuleObject10').analyticRuleVersion10]", @@ -2254,22 +2254,22 @@ ], "entityMappings": [ { - "entityType": "Account", "fieldMappings": [ { "columnName": "SrcUserUpn", "identifier": "FullName" } - ] + ], + "entityType": "Account" }, { - "entityType": "IP", "fieldMappings": [ { "columnName": "ClientIP", "identifier": "Address" } - ] + ], + "entityType": "IP" } ] } @@ -2998,9 +2998,6 @@ "request": { "httpMethod": "Get", "apiEndpoint": "wss://logstream.proofpoint.com:443/v1/stream", - "startTimeAttributeName": "sinceTime", - "endTimeAttributeName": "toTime", - "queryTimeFormat": "yyyy-MM-ddTHH:mm-0000", "queryParameters": { "cid": "[[parameters('clusterId')]", "type": "message" @@ -3008,7 +3005,6 @@ "rateLimitQPS": 20, "queryWindowInMin": 5, "retryCount": 2, - "timeoutInSeconds": 180, "logResponseContent": true }, "response": { @@ -3041,9 +3037,6 @@ "request": { "httpMethod": "Get", "apiEndpoint": "wss://logstream.proofpoint.com:443/v1/stream", - "startTimeAttributeName": "sinceTime", - "endTimeAttributeName": "toTime", - "queryTimeFormat": "yyyy-MM-ddTHH:mm-0000", "queryParameters": { "cid": "[[parameters('clusterId')]", "type": "maillog" @@ -3051,7 +3044,6 @@ "rateLimitQPS": 20, "queryWindowInMin": 5, "retryCount": 2, - "timeoutInSeconds": 180, "logResponseContent": true }, "response": { @@ -3082,7 +3074,7 @@ "apiVersion": "2023-04-01-preview", "location": "[parameters('workspace-location')]", "properties": { - "version": "3.1.1", + "version": "3.1.2", "kind": "Solution", "contentSchemaVersion": "3.0.0", "displayName": "Proofpoint On demand(POD) Email Security", diff --git a/Solutions/Proofpoint On demand(POD) Email Security/ReleaseNotes.md b/Solutions/Proofpoint On demand(POD) Email Security/ReleaseNotes.md index db9afdef371..5eeb537c7e1 100644 --- a/Solutions/Proofpoint On demand(POD) Email Security/ReleaseNotes.md +++ b/Solutions/Proofpoint On 
demand(POD) Email Security/ReleaseNotes.md @@ -1,5 +1,6 @@ | **Version** | **Date Modified (DD-MM-YYYY)** | **Change History** | |-------------|--------------------------------|------------------------------------------------------| +| 3.1.2 | 08-12-2025 | Update **ProofpointPOD_PollingConfig.json** to remove start and end time query params, it impacts time frames at server side and causes duplicate data ingestion.| | 3.1.1 | 03-11-2025 | Update support url in **SolutionMetadata.json**.| | 3.1.0 | 31-07-2025 | Updated Support details and publisherId in **SolutionMetadata.json**, updated Author details and Logo in **Solution_ProofPointPOD.json** from Microsoft to Proofpoint.| | 3.0.5 | 28-07-2025 | Removed Deprecated **Data Connector**. | diff --git a/Solutions/SAP/Agentless/README.md b/Solutions/SAP/Agentless/README.md index 863d4c93702..9dc3269b248 100644 --- a/Solutions/SAP/Agentless/README.md +++ b/Solutions/SAP/Agentless/README.md @@ -4,6 +4,7 @@ Microsoft Sentinel Solution for SAP ERP and S/4HANA. 
Used with SAP Integration S | **Version** | **Date Modified (DD-MM-YYYY)** | **Comments** | | --- | --- | --- | +| 1.1.8 | 01-09-2025 | Update Heartbeat to Log Analytics V2 and added support for audit log user exclusions | | 1.1.7 | 01-09-2025 | Add max-rows parameter | | 1.1.6 | 21-08-2025 | SAP server time zone awareness and data connector heartbeat added | | 1.1.5 | 02-05-2025 | Added metadata for SAP certification | diff --git a/Solutions/SAP/Agentless/package-1.1.8.zip b/Solutions/SAP/Agentless/package-1.1.8.zip new file mode 100644 index 00000000000..26075b58e89 Binary files /dev/null and b/Solutions/SAP/Agentless/package-1.1.8.zip differ diff --git a/Solutions/SOC Prime CCF/Data Connectors/SOCPrime_ccp/SOCPrime_DCR.json b/Solutions/SOC Prime CCF/Data Connectors/SOCPrime_ccp/SOCPrime_DCR.json new file mode 100644 index 00000000000..5223696ee87 --- /dev/null +++ b/Solutions/SOC Prime CCF/Data Connectors/SOCPrime_ccp/SOCPrime_DCR.json @@ -0,0 +1,64 @@ +{ + "name": "SOCPrimeLogsDCR", + "apiVersion": "2021-09-01-preview", + "type": "Microsoft.Insights/dataCollectionRules", + "location": "{{location}}", + "kind": null, + "properties": { + "streamDeclarations": { + "Custom-SOCPrimeAuditLogsStreamAgent": { + "columns": [ + { + "name": "timestamp", + "type": "datetime" + }, + { + "name": "event_name", + "type": "string" + }, + { + "name": "user_email", + "type": "string" + }, + { + "name": "user_name", + "type": "string" + }, + { + "name": "event_page", + "type": "string" + }, + { + "name": "source_ip", + "type": "string" + }, + { + "name": "user_agent", + "type": "string" + } + ] + } + }, + "destinations": { + "logAnalytics": [ + { + "workspaceResourceId": "[variables('workspaceResourceId')]", + "name": "clv2ws1" + } + ] + }, + "dataFlows": [ + { + "streams": [ + "Custom-SOCPrimeAuditLogsStreamAgent" + ], + "destinations": [ + "clv2ws1" + ], + "transformKql": "source | extend EventType = 'event'| extend EventVendor = 'SOC Prime' | extend EventProduct = 'TDM Audit 
Logs'| project TimeGenerated = timestamp, EventName = event_name, UserName=user_name, HttpUserAgent = user_agent, Uri=event_page, UserEmail=user_email, SourceIp=source_ip", + "outputStream": "Custom-SOCPrimeAuditLogs_CL" + } + ], + "dataCollectionEndpointId": "[concat('/subscriptions/',parameters('subscription'),'/resourceGroups/',parameters('resourceGroupName'),'/providers/Microsoft.Insights/dataCollectionEndpoints/',parameters('workspace'))]" + } +} \ No newline at end of file diff --git a/Solutions/SOC Prime CCF/Data Connectors/SOCPrime_ccp/SOCPrime_DataConnectorDefinition.json b/Solutions/SOC Prime CCF/Data Connectors/SOCPrime_ccp/SOCPrime_DataConnectorDefinition.json new file mode 100644 index 00000000000..4f971e10fba --- /dev/null +++ b/Solutions/SOC Prime CCF/Data Connectors/SOCPrime_ccp/SOCPrime_DataConnectorDefinition.json @@ -0,0 +1,98 @@ +{ + "name": "SOCPrimeAuditLogsDataConnector", + "apiVersion": "2022-09-01-preview", + "type": "Microsoft.SecurityInsights/dataConnectorDefinitions", + "location": "{{location}}", + "kind": "Customizable", + "properties": { + "connectorUiConfig": { + "id": "SOCPrimeAuditLogsDataConnector", + "title": "SOC Prime Platform Audit Logs Data Connector", + "publisher": "Microsoft", + "descriptionMarkdown": "The [SOC Prime Audit Logs](https://help.socprime.com/en/articles/6265791-api) data connector allows ingesting logs from the SOC Prime Platform API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SOC Prime Platform API to fetch SOC Prime platform audit logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table, thus resulting in better performance.", + "graphQueries": [ + { + "metricName": "Total Audit Logs received", + "legend": "SOC Prime Audit Logs", + "baseQuery": "SOCPrimeAuditLogs_CL" + } + ], + "sampleQueries": [ + { + "description": "Get Sample of SOC Prime logs", + "query": "SOCPrimeAuditLogs_CL | take 10" + } + ], + "dataTypes": [ + { + "name": "SOCPrimeAuditLogs_CL", + "lastDataReceivedQuery": "SOCPrimeAuditLogs_CL\n | where TimeGenerated > ago(12h) | summarize Time = max(TimeGenerated)\n | where isnotempty(Time)" + } + ], + "connectivityCriteria": [ + { + "type": "HasDataConnectors", + "value": null + } + ], + "availability": { + "status": 1, + "isPreview": false + }, + "permissions": { + "tenant": null, + "licenses": null, + "resourceProvider": [ + { + "provider": "Microsoft.OperationalInsights/workspaces", + "permissionsDisplayText": "Read and Write permissions are required.", + "providerDisplayName": "Workspace", + "scope": "Workspace", + "requiredPermissions": { + "read": true, + "write": true, + "delete": true, + "action": false + } + } + ] + }, + "instructionSteps": [ + { + "instructions": [ + { + "type": "Markdown", + "parameters": { + "content": "#### Configuration steps for the SOC Prime Platform API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://help.socprime.com/en/articles/6265791-api#h_8a0d20b204) to generate personal API key." + } + }, + { + "type": "Markdown", + "parameters": { + "content": "#### Retrieve API Key\n 1. Log in to the SOC Prime Platform\n 2. Click [**Account**] icon -> [**Platform Settings**] -> [**API**] \n 3. Click [**Add New Key**] \n 4. 
In the modal that appears give your key a meaningful name, set expiration date and product APIs the key provides access to \n 5. Click on [**Generate**] \n 6. Copy the key and save it in a safe place. You won't be able to view it again once you close this modal " + } + }, + { + "type": "Textbox", + "parameters": { + "label": "SOC Prime API Key", + "placeholder": "API Key", + "type": "password", + "name": "apitoken" + } + }, + { + "parameters": { + "label": "toggle", + "name": "toggle" + }, + "type": "ConnectionToggleButton" + } + ], + "innerSteps": null + } + ], + "isConnectivityCriteriasMatchSome": false + } + } +} \ No newline at end of file diff --git a/Solutions/SOC Prime CCF/Data Connectors/SOCPrime_ccp/SOCPrime_PollingConfig.json b/Solutions/SOC Prime CCF/Data Connectors/SOCPrime_ccp/SOCPrime_PollingConfig.json new file mode 100644 index 00000000000..f590270a908 --- /dev/null +++ b/Solutions/SOC Prime CCF/Data Connectors/SOCPrime_ccp/SOCPrime_PollingConfig.json @@ -0,0 +1,51 @@ +[ + { + "type": "Microsoft.SecurityInsights/dataConnectors", + "apiVersion": "2021-10-01-preview", + "name": "SOCPrimeAuditLogs", + "location": "{{location}}", + "kind": "RestApiPoller", + "properties": { + "connectorDefinitionName": "SOCPrimeAuditLogsDataConnector", + "dataType": "SOCPrimeAuditLogs_CL", + "auth": { + "type": "APIKey", + "ApiKey": "[[parameters('apitoken')]", + "ApiKeyName": "client_secret_id" + }, + "request": { + "apiEndpoint": "https://api.tdm.socprime.com/v1/audit-logs", + "headers": { + "Accept": "application/json", + "User-Agent": "SOCPrime_UA" + }, + "httpMethod": "Get", + "queryWindowInMin": 10, + "retryCount": 3, + "rateLimitQPS": 1, + "StartTimeAttributeName": "date_from", + "EndTimeAttributeName": "date_to" + + }, + "response": { + "format": "json", + "eventsJsonPaths": [ + "$.events" + ] + }, + "dcrConfig": { + "streamName": "Custom-SOCPrimeAuditLogsStreamAgent", + "dataCollectionEndpoint": "[[parameters('dcrConfig').dataCollectionEndpoint]", + 
"dataCollectionRuleImmutableId": "[[parameters('dcrConfig').dataCollectionRuleImmutableId]" + }, + "paging": { + "pagingType": "NextPageToken", + "pageSizeParameterName": "size", + "pageSize": 100, + "nextPageTokenJsonPath" : "$.next_page_token", + "nextPageParaName": "next_page_token" + } + } + } + +] diff --git a/Solutions/SOC Prime CCF/Data Connectors/SOCPrime_ccp/SOCPrime_table.json b/Solutions/SOC Prime CCF/Data Connectors/SOCPrime_ccp/SOCPrime_table.json new file mode 100644 index 00000000000..25006a1568d --- /dev/null +++ b/Solutions/SOC Prime CCF/Data Connectors/SOCPrime_ccp/SOCPrime_table.json @@ -0,0 +1,57 @@ +{ + "name": "SOCPrimeAuditLogs_CL", + "apiVersion": "2021-03-01-privatepreview", + "type": "Microsoft.OperationalInsights/workspaces/tables", + "location": "{{location}}", + "kind": null, + "properties": { + "schema": { + "name": "SOCPrimeAuditLogs_CL", + "columns": [ + { + "name": "TimeGenerated", + "type": "Datetime", + "isDefaultDisplay": true, + "description": "The timestamp (UTC) reflecting the time in which the event was generated." 
+ }, + { + "name": "EventVendor", + "type": "string" + }, + { + "name": "EventType", + "type": "string" + }, + { + "name": "EventProduct", + "type": "string" + }, + { + "name": "EventName", + "type": "string", + "description": "Event Name" + }, + { + "name": "UserEmail", + "type": "string" + }, + { + "name": "UserName", + "type": "string" + }, + { + "name": "Uri", + "type": "string" + }, + { + "name": "SourceIp", + "type": "string" + }, + { + "name": "HttpUserAgent", + "type": "string" + } + ] + } + } +} \ No newline at end of file diff --git a/Solutions/SOC Prime CCF/Data/Solution_SOCPrimeAuditLogs.json b/Solutions/SOC Prime CCF/Data/Solution_SOCPrimeAuditLogs.json new file mode 100644 index 00000000000..c44c3c26698 --- /dev/null +++ b/Solutions/SOC Prime CCF/Data/Solution_SOCPrimeAuditLogs.json @@ -0,0 +1,14 @@ +{ + "Name": "SOC Prime CCF", + "Author": "SOC Prime - support@socprime.com", + "Logo": "", + "Description": "The [SOC Prime Audit Logs](https://help.socprime.com/en/articles/6265791-api) data connector allows ingesting logs from the SOC Prime Platform API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SOC Prime Platform API to fetch SOC Prime platform audit logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table, thus resulting in better performance.", + "Data Connectors": [ + "Data Connectors/SOCPrime_ccp/SOCPrime_DataConnectorDefinition.json" + ], + "BasePath": "C:\\GitHub\\Azure-Sentinel\\Solutions\\SOC Prime CCF", + "Version": "3.0.0", + "Metadata": "SolutionMetadata.json", + "TemplateSpec": false, + "Is1PConnector": false +} diff --git a/Solutions/SOC Prime CCF/Package/3.0.0.zip b/Solutions/SOC Prime CCF/Package/3.0.0.zip new file mode 100644 index 00000000000..53084ed48a5 Binary files /dev/null and b/Solutions/SOC Prime CCF/Package/3.0.0.zip differ diff --git a/Solutions/SOC Prime CCF/Package/createUiDefinition.json b/Solutions/SOC Prime CCF/Package/createUiDefinition.json new file mode 100644 index 00000000000..bb31e4b7e93 --- /dev/null +++ b/Solutions/SOC Prime CCF/Package/createUiDefinition.json @@ -0,0 +1,85 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/0.1.2-preview/CreateUIDefinition.MultiVm.json#", + "handler": "Microsoft.Azure.CreateUIDef", + "version": "0.1.2-preview", + "parameters": { + "config": { + "isWizard": false, + "basics": { + "description": "\n\n**Note:** Please refer to the following before installing the solution: \n\n• Review the solution [Release Notes](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/SOC%20Prime%20CCF/ReleaseNotes.md)\n\n • There may be [known issues](https://aka.ms/sentinelsolutionsknownissues) pertaining to this Solution, please refer to them before installing.\n\nThe [SOC Prime Audit Logs](https://help.socprime.com/en/articles/6265791-api) data connector allows ingesting logs from the SOC Prime Platform API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SOC Prime Platform API to fetch SOC Prime platform audit logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table, thus resulting in better performance.\n\n**Data Connectors:** 1\n\n[Learn more about Microsoft Sentinel](https://aka.ms/azuresentinel) | [Learn more about Solutions](https://aka.ms/azuresentinelsolutionsdoc)", + "subscription": { + "resourceProviders": [ + "Microsoft.OperationsManagement/solutions", + "Microsoft.OperationalInsights/workspaces/providers/alertRules", + "Microsoft.Insights/workbooks", + "Microsoft.Logic/workflows" + ] + }, + "location": { + "metadata": { + "hidden": "Hiding location, we get it from the log analytics workspace" + }, + "visible": false + }, + "resourceGroup": { + "allowExisting": true + } + } + }, + "basics": [ + { + "name": "getLAWorkspace", + "type": "Microsoft.Solutions.ArmApiControl", + "toolTip": "This filters by workspaces that exist in the Resource Group selected", + "condition": "[greater(length(resourceGroup().name),0)]", + "request": { + "method": "GET", + "path": "[concat(subscription().id,'/providers/Microsoft.OperationalInsights/workspaces?api-version=2020-08-01')]" + } + }, + { + "name": "workspace", + "type": "Microsoft.Common.DropDown", + "label": "Workspace", + "placeholder": "Select a workspace", + "toolTip": "This dropdown will list only workspace that exists in the Resource Group selected", + "constraints": { + "allowedValues": "[map(filter(basics('getLAWorkspace').value, (filter) => contains(toLower(filter.id), toLower(resourceGroup().name))), (item) => parse(concat('{\"label\":\"', item.name, '\",\"value\":\"', item.name, '\"}')))]", + "required": true + }, + "visible": true + } + ], + "steps": [ + { + "name": "dataconnectors", + "label": "Data Connectors", + "bladeTitle": "Data Connectors", + "elements": [ + { + "name": "dataconnectors1-text", + 
"type": "Microsoft.Common.TextBlock", + "options": { + "text": "This Solution installs the data connector for SOC Prime Platform Audit Logs Data Connector. You can get SOC Prime Platform Audit Logs Data Connector data in your Microsoft Sentinel workspace. After installing the solution, configure and enable this data connector by following guidance in Manage solution view." + } + }, + { + "name": "dataconnectors-link1", + "type": "Microsoft.Common.TextBlock", + "options": { + "link": { + "label": "Learn more about connecting data sources", + "uri": "https://docs.microsoft.com/azure/sentinel/connect-data-sources" + } + } + } + ] + } + ], + "outputs": { + "workspace-location": "[first(map(filter(basics('getLAWorkspace').value, (filter) => and(contains(toLower(filter.id), toLower(resourceGroup().name)),equals(filter.name,basics('workspace')))), (item) => item.location))]", + "location": "[location()]", + "workspace": "[basics('workspace')]" + } + } +} diff --git a/Solutions/SOC Prime CCF/Package/mainTemplate.json b/Solutions/SOC Prime CCF/Package/mainTemplate.json new file mode 100644 index 00000000000..a802e0fb32c --- /dev/null +++ b/Solutions/SOC Prime CCF/Package/mainTemplate.json @@ -0,0 +1,663 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "author": "SOC Prime - support@socprime.com", + "comments": "Solution template for SOC Prime CCF" + }, + "parameters": { + "location": { + "type": "string", + "minLength": 1, + "defaultValue": "[resourceGroup().location]", + "metadata": { + "description": "Not used, but needed to pass arm-ttk test `Location-Should-Not-Be-Hardcoded`. 
We instead use the `workspace-location` which is derived from the LA workspace" + } + }, + "workspace-location": { + "type": "string", + "defaultValue": "", + "metadata": { + "description": "[concat('Region to deploy solution resources -- separate from location selection',parameters('location'))]" + } + }, + "workspace": { + "defaultValue": "", + "type": "string", + "metadata": { + "description": "Workspace name for Log Analytics where Microsoft Sentinel is setup" + } + }, + "resourceGroupName": { + "type": "string", + "defaultValue": "[resourceGroup().name]", + "metadata": { + "description": "resource group name where Microsoft Sentinel is setup" + } + }, + "subscription": { + "type": "string", + "defaultValue": "[last(split(subscription().id, '/'))]", + "metadata": { + "description": "subscription id where Microsoft Sentinel is setup" + } + } + }, + "variables": { + "email": "support@socprime.com", + "_email": "[variables('email')]", + "_solutionName": "SOC Prime CCF", + "_solutionVersion": "3.0.0", + "solutionId": "socprime.azure-sentinel-solution-socprimeauditccp", + "_solutionId": "[variables('solutionId')]", + "workspaceResourceId": "[resourceId('microsoft.OperationalInsights/Workspaces', parameters('workspace'))]", + "dataConnectorCCPVersion": "1.0.0", + "_dataConnectorContentIdConnectorDefinition1": "SOCPrimeAuditLogsDataConnector", + "dataConnectorTemplateNameConnectorDefinition1": "[concat(parameters('workspace'),'-dc-',uniquestring(variables('_dataConnectorContentIdConnectorDefinition1')))]", + "_dataConnectorContentIdConnections1": "SOCPrimeAuditLogsDataConnectorConnections", + "dataConnectorTemplateNameConnections1": "[concat(parameters('workspace'),'-dc-',uniquestring(variables('_dataConnectorContentIdConnections1')))]", + "blanks": "[replace('b', 'b', '')]", + "_solutioncontentProductId": "[concat(take(variables('_solutionId'),50),'-','sl','-', uniqueString(concat(variables('_solutionId'),'-','Solution','-',variables('_solutionId'),'-', 
variables('_solutionVersion'))))]" + }, + "resources": [ + { + "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", + "apiVersion": "2023-04-01-preview", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/', variables('dataConnectorTemplateNameConnectorDefinition1'), variables('dataConnectorCCPVersion'))]", + "location": "[parameters('workspace-location')]", + "dependsOn": [ + "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" + ], + "properties": { + "contentId": "[variables('_dataConnectorContentIdConnectorDefinition1')]", + "displayName": "SOC Prime Platform Audit Logs Data Connector", + "contentKind": "DataConnector", + "mainTemplate": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "[variables('dataConnectorCCPVersion')]", + "parameters": {}, + "variables": {}, + "resources": [ + { + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',variables('_dataConnectorContentIdConnectorDefinition1'))]", + "apiVersion": "2025-09-01", + "type": "Microsoft.OperationalInsights/workspaces/providers/dataConnectorDefinitions", + "location": "[parameters('workspace-location')]", + "kind": "Customizable", + "properties": { + "connectorUiConfig": { + "id": "SOCPrimeAuditLogsDataConnector", + "title": "SOC Prime Platform Audit Logs Data Connector", + "publisher": "Microsoft", + "descriptionMarkdown": "The [SOC Prime Audit Logs](https://help.socprime.com/en/articles/6265791-api) data connector allows ingesting logs from the SOC Prime Platform API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SOC Prime Platform API to fetch SOC Prime platform audit logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table, thus resulting in better performance.", + "graphQueries": [ + { + "metricName": "Total Audit Logs received", + "legend": "SOC Prime Audit Logs", + "baseQuery": "SOCPrimeAuditLogs_CL" + } + ], + "sampleQueries": [ + { + "description": "Get Sample of SOC Prime logs", + "query": "SOCPrimeAuditLogs_CL | take 10" + } + ], + "dataTypes": [ + { + "name": "SOCPrimeAuditLogs_CL", + "lastDataReceivedQuery": "SOCPrimeAuditLogs_CL\n | where TimeGenerated > ago(12h) | summarize Time = max(TimeGenerated)\n | where isnotempty(Time)" + } + ], + "connectivityCriteria": [ + { + "type": "HasDataConnectors", + "value": null + } + ], + "availability": { + "status": 1, + "isPreview": false + }, + "permissions": { + "resourceProvider": [ + { + "provider": "Microsoft.OperationalInsights/workspaces", + "permissionsDisplayText": "Read and Write permissions are required.", + "providerDisplayName": "Workspace", + "scope": "Workspace", + "requiredPermissions": { + "read": true, + "write": true, + "delete": true, + "action": false + } + } + ] + }, + "instructionSteps": [ + { + "instructions": [ + { + "type": "Markdown", + "parameters": { + "content": "#### Configuration steps for the SOC Prime Platform API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://help.socprime.com/en/articles/6265791-api#h_8a0d20b204) to generate personal API key." + } + }, + { + "type": "Markdown", + "parameters": { + "content": "#### Retrieve API Key\n 1. Log in to the SOC Prime Platform\n 2. Click [**Account**] icon -> [**Platform Settings**] -> [**API**] \n 3. Click [**Add New Key**] \n 4. 
In the modal that appears give your key a meaningful name, set expiration date and product APIs the key provides access to \n 5. Click on [**Generate**] \n 6. Copy the key and save it in a safe place. You won't be able to view it again once you close this modal " + } + }, + { + "type": "Textbox", + "parameters": { + "label": "SOC Prime API Key", + "placeholder": "API Key", + "type": "password", + "name": "apitoken" + } + }, + { + "parameters": { + "label": "toggle", + "name": "toggle" + }, + "type": "ConnectionToggleButton" + } + ] + } + ], + "isConnectivityCriteriasMatchSome": false + } + } + }, + { + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('DataConnector-', variables('_dataConnectorContentIdConnectorDefinition1')))]", + "apiVersion": "2022-01-01-preview", + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "properties": { + "parentId": "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/dataConnectorDefinitions', variables('_dataConnectorContentIdConnectorDefinition1'))]", + "contentId": "[variables('_dataConnectorContentIdConnectorDefinition1')]", + "kind": "DataConnector", + "version": "[variables('dataConnectorCCPVersion')]", + "source": { + "sourceId": "[variables('_solutionId')]", + "name": "[variables('_solutionName')]", + "kind": "Solution" + }, + "author": { + "name": "SOC Prime", + "email": "[variables('_email')]" + }, + "support": { + "tier": "Partner", + "name": "SOC Prime", + "email": "support@socprime.com", + "link": "https://socprime.com/" + }, + "dependencies": { + "criteria": [ + { + "version": "[variables('dataConnectorCCPVersion')]", + "contentId": "[variables('_dataConnectorContentIdConnections1')]", + "kind": "ResourcesDataConnector" + } + ] + } + } + }, + { + "name": "SOCPrimeLogsDCR", + "apiVersion": "2024-03-11", + "type": "Microsoft.Insights/dataCollectionRules", + "location": 
"[parameters('workspace-location')]", + "kind": "[variables('blanks')]", + "properties": { + "streamDeclarations": { + "Custom-SOCPrimeAuditLogsStreamAgent": { + "columns": [ + { + "name": "timestamp", + "type": "datetime" + }, + { + "name": "event_name", + "type": "string" + }, + { + "name": "user_email", + "type": "string" + }, + { + "name": "user_name", + "type": "string" + }, + { + "name": "event_page", + "type": "string" + }, + { + "name": "source_ip", + "type": "string" + }, + { + "name": "user_agent", + "type": "string" + } + ] + } + }, + "destinations": { + "logAnalytics": [ + { + "workspaceResourceId": "[variables('workspaceResourceId')]", + "name": "clv2ws1" + } + ] + }, + "dataFlows": [ + { + "streams": [ + "Custom-SOCPrimeAuditLogsStreamAgent" + ], + "destinations": [ + "clv2ws1" + ], + "transformKql": "source | extend EventType = 'event'| extend EventVendor = 'SOC Prime' | extend EventProduct = 'TDM Audit Logs'| project TimeGenerated = timestamp, EventName = event_name, UserName=user_name, HttpUserAgent = user_agent, Uri=event_page, UserEmail=user_email, SourceIp=source_ip", + "outputStream": "Custom-SOCPrimeAuditLogs_CL" + } + ], + "dataCollectionEndpointId": "[concat('/subscriptions/',parameters('subscription'),'/resourceGroups/',parameters('resourceGroupName'),'/providers/Microsoft.Insights/dataCollectionEndpoints/',parameters('workspace'))]" + } + }, + { + "name": "SOCPrimeAuditLogs_CL", + "apiVersion": "2025-07-01", + "type": "Microsoft.OperationalInsights/workspaces/tables", + "location": "[parameters('workspace-location')]", + "kind": null, + "properties": { + "schema": { + "name": "SOCPrimeAuditLogs_CL", + "columns": [ + { + "name": "TimeGenerated", + "type": "Datetime", + "isDefaultDisplay": true, + "description": "The timestamp (UTC) reflecting the time in which the event was generated." 
+ }, + { + "name": "EventVendor", + "type": "string" + }, + { + "name": "EventType", + "type": "string" + }, + { + "name": "EventProduct", + "type": "string" + }, + { + "name": "EventName", + "type": "string", + "description": "Event Name" + }, + { + "name": "UserEmail", + "type": "string" + }, + { + "name": "UserName", + "type": "string" + }, + { + "name": "Uri", + "type": "string" + }, + { + "name": "SourceIp", + "type": "string" + }, + { + "name": "HttpUserAgent", + "type": "string" + } + ] + } + } + } + ] + }, + "packageKind": "Solution", + "packageVersion": "[variables('_solutionVersion')]", + "packageName": "[variables('_solutionName')]", + "contentProductId": "[concat(take(variables('_solutionId'), 50),'-','dc','-', uniqueString(concat(variables('_solutionId'),'-','DataConnector','-',variables('_dataConnectorContentIdConnectorDefinition1'),'-', variables('dataConnectorCCPVersion'))))]", + "packageId": "[variables('_solutionId')]", + "contentSchemaVersion": "3.0.0", + "version": "[variables('dataConnectorCCPVersion')]" + } + }, + { + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',variables('_dataConnectorContentIdConnectorDefinition1'))]", + "apiVersion": "2025-09-01", + "type": "Microsoft.OperationalInsights/workspaces/providers/dataConnectorDefinitions", + "location": "[parameters('workspace-location')]", + "kind": "Customizable", + "properties": { + "connectorUiConfig": { + "id": "SOCPrimeAuditLogsDataConnector", + "title": "SOC Prime Platform Audit Logs Data Connector", + "publisher": "Microsoft", + "descriptionMarkdown": "The [SOC Prime Audit Logs](https://help.socprime.com/en/articles/6265791-api) data connector allows ingesting logs from the SOC Prime Platform API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SOC Prime Platform API to fetch SOC Prime platform audit logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table, thus resulting in better performance.", + "graphQueries": [ + { + "metricName": "Total Audit Logs received", + "legend": "SOC Prime Audit Logs", + "baseQuery": "SOCPrimeAuditLogs_CL" + } + ], + "sampleQueries": [ + { + "description": "Get Sample of SOC Prime logs", + "query": "SOCPrimeAuditLogs_CL | take 10" + } + ], + "dataTypes": [ + { + "name": "SOCPrimeAuditLogs_CL", + "lastDataReceivedQuery": "SOCPrimeAuditLogs_CL\n | where TimeGenerated > ago(12h) | summarize Time = max(TimeGenerated)\n | where isnotempty(Time)" + } + ], + "connectivityCriteria": [ + { + "type": "HasDataConnectors", + "value": null + } + ], + "availability": { + "status": 1, + "isPreview": false + }, + "permissions": { + "resourceProvider": [ + { + "provider": "Microsoft.OperationalInsights/workspaces", + "permissionsDisplayText": "Read and Write permissions are required.", + "providerDisplayName": "Workspace", + "scope": "Workspace", + "requiredPermissions": { + "read": true, + "write": true, + "delete": true, + "action": false + } + } + ] + }, + "instructionSteps": [ + { + "instructions": [ + { + "type": "Markdown", + "parameters": { + "content": "#### Configuration steps for the SOC Prime Platform API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://help.socprime.com/en/articles/6265791-api#h_8a0d20b204) to generate personal API key." + } + }, + { + "type": "Markdown", + "parameters": { + "content": "#### Retrieve API Key\n 1. Log in to the SOC Prime Platform\n 2. Click [**Account**] icon -> [**Platform Settings**] -> [**API**] \n 3. Click [**Add New Key**] \n 4. 
In the modal that appears give your key a meaningful name, set expiration date and product APIs the key provides access to \n 5. Click on [**Generate**] \n 6. Copy the key and save it in a safe place. You won't be able to view it again once you close this modal " + } + }, + { + "type": "Textbox", + "parameters": { + "label": "SOC Prime API Key", + "placeholder": "API Key", + "type": "password", + "name": "apitoken" + } + }, + { + "parameters": { + "label": "toggle", + "name": "toggle" + }, + "type": "ConnectionToggleButton" + } + ] + } + ], + "isConnectivityCriteriasMatchSome": false + } + } + }, + { + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('DataConnector-', variables('_dataConnectorContentIdConnectorDefinition1')))]", + "apiVersion": "2022-01-01-preview", + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "properties": { + "parentId": "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/dataConnectorDefinitions', variables('_dataConnectorContentIdConnectorDefinition1'))]", + "contentId": "[variables('_dataConnectorContentIdConnectorDefinition1')]", + "kind": "DataConnector", + "version": "[variables('dataConnectorCCPVersion')]", + "source": { + "sourceId": "[variables('_solutionId')]", + "name": "[variables('_solutionName')]", + "kind": "Solution" + }, + "author": { + "name": "SOC Prime", + "email": "[variables('_email')]" + }, + "support": { + "tier": "Partner", + "name": "SOC Prime", + "email": "support@socprime.com", + "link": "https://socprime.com/" + }, + "dependencies": { + "criteria": [ + { + "version": "[variables('dataConnectorCCPVersion')]", + "contentId": "[variables('_dataConnectorContentIdConnections1')]", + "kind": "ResourcesDataConnector" + } + ] + } + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", + "apiVersion": "2023-04-01-preview", + "name": 
"[concat(parameters('workspace'),'/Microsoft.SecurityInsights/', variables('dataConnectorTemplateNameConnections1'), variables('dataConnectorCCPVersion'))]", + "location": "[parameters('workspace-location')]", + "dependsOn": [ + "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" + ], + "properties": { + "contentId": "[variables('_dataConnectorContentIdConnections1')]", + "displayName": "SOC Prime Platform Audit Logs Data Connector", + "contentKind": "ResourcesDataConnector", + "mainTemplate": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "[variables('dataConnectorCCPVersion')]", + "parameters": { + "guidValue": { + "defaultValue": "[[newGuid()]", + "type": "securestring" + }, + "innerWorkspace": { + "defaultValue": "[parameters('workspace')]", + "type": "securestring" + }, + "connectorDefinitionName": { + "defaultValue": "SOC Prime Platform Audit Logs Data Connector", + "type": "securestring", + "minLength": 1 + }, + "workspace": { + "defaultValue": "[parameters('workspace')]", + "type": "securestring" + }, + "dcrConfig": { + "defaultValue": { + "dataCollectionEndpoint": "data collection Endpoint", + "dataCollectionRuleImmutableId": "data collection rule immutableId" + }, + "type": "object" + }, + "apitoken": { + "defaultValue": "apitoken", + "type": "securestring", + "minLength": 1 + } + }, + "variables": { + "_dataConnectorContentIdConnections1": "[variables('_dataConnectorContentIdConnections1')]" + }, + "resources": [ + { + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('DataConnector-', variables('_dataConnectorContentIdConnections1')))]", + "apiVersion": "2022-01-01-preview", + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "properties": { + "parentId": 
"[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/dataConnectors', variables('_dataConnectorContentIdConnections1'))]", + "contentId": "[variables('_dataConnectorContentIdConnections1')]", + "kind": "ResourcesDataConnector", + "version": "[variables('dataConnectorCCPVersion')]", + "source": { + "sourceId": "[variables('_solutionId')]", + "name": "[variables('_solutionName')]", + "kind": "Solution" + }, + "author": { + "name": "SOC Prime", + "email": "[variables('_email')]" + }, + "support": { + "tier": "Partner", + "name": "SOC Prime", + "email": "support@socprime.com", + "link": "https://socprime.com/" + } + } + }, + { + "name": "[[concat(parameters('innerWorkspace'),'/Microsoft.SecurityInsights/', 'SOCPrimeAuditLogs', parameters('guidValue'))]", + "apiVersion": "2025-09-01", + "type": "Microsoft.OperationalInsights/workspaces/providers/dataConnectors", + "location": "[parameters('workspace-location')]", + "kind": "RestApiPoller", + "properties": { + "connectorDefinitionName": "SOCPrimeAuditLogsDataConnector", + "dataType": "SOCPrimeAuditLogs_CL", + "auth": { + "type": "APIKey", + "ApiKey": "[[parameters('apitoken')]", + "ApiKeyName": "client_secret_id" + }, + "request": { + "apiEndpoint": "https://api.tdm.socprime.com/v1/audit-logs", + "headers": { + "Accept": "application/json", + "User-Agent": "SOCPrime_UA" + }, + "httpMethod": "Get", + "queryWindowInMin": 10, + "retryCount": 3, + "rateLimitQPS": 1, + "StartTimeAttributeName": "date_from", + "EndTimeAttributeName": "date_to" + }, + "response": { + "format": "json", + "eventsJsonPaths": [ + "$.events" + ] + }, + "dcrConfig": { + "streamName": "Custom-SOCPrimeAuditLogsStreamAgent", + "dataCollectionEndpoint": "[[parameters('dcrConfig').dataCollectionEndpoint]", + "dataCollectionRuleImmutableId": "[[parameters('dcrConfig').dataCollectionRuleImmutableId]" + }, + "paging": { + "pagingType": "NextPageToken", + "pageSizeParameterName": 
"size", + "pageSize": 100, + "nextPageTokenJsonPath": "$.next_page_token", + "nextPageParaName": "next_page_token" + } + } + } + ] + }, + "packageKind": "Solution", + "packageVersion": "[variables('_solutionVersion')]", + "packageName": "[variables('_solutionName')]", + "contentProductId": "[concat(take(variables('_solutionId'), 50),'-','rdc','-', uniqueString(concat(variables('_solutionId'),'-','ResourcesDataConnector','-',variables('_dataConnectorContentIdConnections1'),'-', variables('dataConnectorCCPVersion'))))]", + "packageId": "[variables('_solutionId')]", + "contentSchemaVersion": "3.0.0", + "version": "[variables('dataConnectorCCPVersion')]" + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/contentPackages", + "apiVersion": "2023-04-01-preview", + "location": "[parameters('workspace-location')]", + "properties": { + "version": "3.0.0", + "kind": "Solution", + "contentSchemaVersion": "3.0.0", + "displayName": "SOC Prime CCF", + "publisherDisplayName": "SOC Prime", + "descriptionHtml": "

Note: Please refer to the following before installing the solution:

\n

• Review the solution Release Notes

\n

• There may be known issues pertaining to this Solution, please refer to them before installing.

\n

The SOC Prime Audit Logs data connector allows ingesting logs from the SOC Prime Platform API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SOC Prime Platform API to fetch SOC Prime platform audit logs and it supports DCR-based ingestion time transformations that parses the received security data into a custom table, thus resulting in better performance.

\n

Data Connectors: 1

\n

Learn more about Microsoft Sentinel | Learn more about Solutions

\n", + "contentKind": "Solution", + "contentProductId": "[variables('_solutioncontentProductId')]", + "id": "[variables('_solutioncontentProductId')]", + "icon": "", + "contentId": "[variables('_solutionId')]", + "parentId": "[variables('_solutionId')]", + "source": { + "kind": "Solution", + "name": "SOC Prime CCF", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "SOC Prime", + "email": "[variables('_email')]" + }, + "support": { + "name": "SOC Prime", + "email": "support@socprime.com", + "tier": "Partner", + "link": "https://socprime.com/" + }, + "dependencies": { + "operator": "AND", + "criteria": [ + { + "kind": "DataConnector", + "contentId": "[variables('_dataConnectorContentIdConnections1')]", + "version": "[variables('dataConnectorCCPVersion')]" + } + ] + }, + "firstPublishDate": "2025-09-25", + "providers": [ + "SOC Prime" + ], + "categories": { + "domains": [ + "Security - Threat Protection" + ] + } + }, + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/', variables('_solutionId'))]" + } + ], + "outputs": {} +} diff --git a/Solutions/SOC Prime CCF/Package/testParameters.json b/Solutions/SOC Prime CCF/Package/testParameters.json new file mode 100644 index 00000000000..554801e41b7 --- /dev/null +++ b/Solutions/SOC Prime CCF/Package/testParameters.json @@ -0,0 +1,38 @@ +{ + "location": { + "type": "string", + "minLength": 1, + "defaultValue": "[resourceGroup().location]", + "metadata": { + "description": "Not used, but needed to pass arm-ttk test `Location-Should-Not-Be-Hardcoded`. 
We instead use the `workspace-location` which is derived from the LA workspace" + } + }, + "workspace-location": { + "type": "string", + "defaultValue": "", + "metadata": { + "description": "[concat('Region to deploy solution resources -- separate from location selection',parameters('location'))]" + } + }, + "workspace": { + "defaultValue": "", + "type": "string", + "metadata": { + "description": "Workspace name for Log Analytics where Microsoft Sentinel is setup" + } + }, + "resourceGroupName": { + "type": "string", + "defaultValue": "[resourceGroup().name]", + "metadata": { + "description": "resource group name where Microsoft Sentinel is setup" + } + }, + "subscription": { + "type": "string", + "defaultValue": "[last(split(subscription().id, '/'))]", + "metadata": { + "description": "subscription id where Microsoft Sentinel is setup" + } + } +} diff --git a/Solutions/SOC Prime CCF/ReleaseNotes.md b/Solutions/SOC Prime CCF/ReleaseNotes.md new file mode 100644 index 00000000000..7c833f6411f --- /dev/null +++ b/Solutions/SOC Prime CCF/ReleaseNotes.md @@ -0,0 +1,3 @@ +| **Version** | **Date Modified (DD-MM-YYYY)** | **Change History** | +|-------------|--------------------------------|----------------------------------------| +| 3.0.0 | 24-11-2025 | Initial Solution Release. 
| \ No newline at end of file diff --git a/Solutions/SOC Prime CCF/SolutionMetadata.json b/Solutions/SOC Prime CCF/SolutionMetadata.json new file mode 100644 index 00000000000..a4672ce5af8 --- /dev/null +++ b/Solutions/SOC Prime CCF/SolutionMetadata.json @@ -0,0 +1,16 @@ +{ + "publisherId": "socprime", + "offerId": "azure-sentinel-solution-socprimeauditccp", + "firstPublishDate": "2025-09-25", + "providers": ["SOC Prime"], + "categories": { + "domains": ["Security - Threat Protection"], + "verticals": [] + }, + "support": { + "tier": "Partner", + "name": "SOC Prime", + "email": "support@socprime.com", + "link": "https://socprime.com/" + } +} \ No newline at end of file diff --git a/Solutions/SentinelSOARessentials/Data/Solution_SentinelSOAREssentials.json b/Solutions/SentinelSOARessentials/Data/Solution_SentinelSOAREssentials.json index 5b6c04e28fa..a5607c97a43 100644 --- a/Solutions/SentinelSOARessentials/Data/Solution_SentinelSOAREssentials.json +++ b/Solutions/SentinelSOARessentials/Data/Solution_SentinelSOAREssentials.json @@ -21,7 +21,10 @@ "Playbooks/Defender_XDR_BEC_Playbook_for_SecOps-Tasks/azuredeploy.json", "Playbooks/Defender_XDR_Phishing_Playbook_for_SecOps-Tasks/azuredeploy.json", "Playbooks/Defender_XDR_Ransomware_Playbook_for_SecOps-Tasks/azuredeploy.json", - "Playbooks/Send-Teams-adaptive-card-on-incident-creation/azuredeploy.json" + "Playbooks/Send-Teams-adaptive-card-on-incident-creation/azuredeploy.json", + "Playbooks/Http-Trigger-Entity-Analyzer/azuredeploy.json", + "Playbooks/Incident-Trigger-Entity-Analyzer/azuredeploy.json", + "Playbooks/Url-Trigger-Entity-Analyzer/azuredeploy.json" ], "Workbooks": [ "Workbooks/AutomationHealth.json", diff --git a/Solutions/SentinelSOARessentials/Package/3.0.4.zip b/Solutions/SentinelSOARessentials/Package/3.0.4.zip new file mode 100644 index 00000000000..65a7a8b7a01 Binary files /dev/null and b/Solutions/SentinelSOARessentials/Package/3.0.4.zip differ diff --git 
a/Solutions/SentinelSOARessentials/Package/createUiDefinition.json b/Solutions/SentinelSOARessentials/Package/createUiDefinition.json index 8a7ad5a3ab0..a61863162c9 100644 --- a/Solutions/SentinelSOARessentials/Package/createUiDefinition.json +++ b/Solutions/SentinelSOARessentials/Package/createUiDefinition.json @@ -6,7 +6,7 @@ "config": { "isWizard": false, "basics": { - "description": "\n\n**Note:** Please refer to the following before installing the solution: \n\n• Review the solution [Release Notes](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/SentinelSOARessentials/ReleaseNotes.md)\n\n • There may be [known issues](https://aka.ms/sentinelsolutionsknownissues) pertaining to this Solution, please refer to them before installing.\n\nThe Microsoft Sentinel SOAR Essentials solution for Microsoft Sentinel contains Playbooks that can help you get started with basic notification and orchestration scenarios for common use cases. These include Playbooks for sending notifications over email and/or collaboration platforms such as MS Teams, Slack, etc.\n\n**Workbooks:** 4, **Playbooks:** 18\n\n[Learn more about Microsoft Sentinel](https://aka.ms/azuresentinel) | [Learn more about Solutions](https://aka.ms/azuresentinelsolutionsdoc)", + "description": "\n\n**Note:** Please refer to the following before installing the solution: \n\n• Review the solution [Release Notes](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/SentinelSOARessentials/ReleaseNotes.md)\n\n • There may be [known issues](https://aka.ms/sentinelsolutionsknownissues) pertaining to this Solution, please refer to them before installing.\n\nThe Microsoft Sentinel SOAR Essentials solution for Microsoft Sentinel contains Playbooks that can help you get started with basic notification and orchestration scenarios for common use cases. 
These include Playbooks for sending notifications over email and/or collaboration platforms such as MS Teams, Slack, etc.\n\n**Workbooks:** 4, **Playbooks:** 21\n\n[Learn more about Microsoft Sentinel](https://aka.ms/azuresentinel) | [Learn more about Solutions](https://aka.ms/azuresentinelsolutionsdoc)", "subscription": { "resourceProviders": [ "Microsoft.OperationsManagement/solutions", diff --git a/Solutions/SentinelSOARessentials/Package/mainTemplate.json b/Solutions/SentinelSOARessentials/Package/mainTemplate.json index a713fdab92f..32a8cbdde5a 100644 --- a/Solutions/SentinelSOARessentials/Package/mainTemplate.json +++ b/Solutions/SentinelSOARessentials/Package/mainTemplate.json @@ -65,7 +65,7 @@ "email": "support@microsoft.com", "_email": "[variables('email')]", "_solutionName": "SentinelSOARessentials", - "_solutionVersion": "3.0.3", + "_solutionVersion": "3.0.4", "solutionId": "azuresentinel.azure-sentinel-solution-sentinelsoaressentials", "_solutionId": "[variables('solutionId')]", "Incident-Assignment-Shifts": "Incident-Assignment-Shifts", @@ -215,6 +215,30 @@ "playbookId18": "[resourceId('Microsoft.Logic/workflows', variables('playbookContentId18'))]", "playbookTemplateSpecName18": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-pl-',uniquestring(variables('_playbookContentId18'))))]", "_playbookcontentProductId18": "[concat(take(variables('_solutionId'),50),'-','pl','-', uniqueString(concat(variables('_solutionId'),'-','Playbook','-',variables('_playbookContentId18'),'-', variables('playbookVersion18'))))]", + "Http-Trigger-Entity-Analyzer": "Http-Trigger-Entity-Analyzer", + "_Http-Trigger-Entity-Analyzer": "[variables('Http-Trigger-Entity-Analyzer')]", + "playbookVersion19": "1.0", + "playbookContentId19": "Http-Trigger-Entity-Analyzer", + "_playbookContentId19": "[variables('playbookContentId19')]", + "playbookId19": "[resourceId('Microsoft.Logic/workflows', variables('playbookContentId19'))]", + 
"playbookTemplateSpecName19": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-pl-',uniquestring(variables('_playbookContentId19'))))]", + "_playbookcontentProductId19": "[concat(take(variables('_solutionId'),50),'-','pl','-', uniqueString(concat(variables('_solutionId'),'-','Playbook','-',variables('_playbookContentId19'),'-', variables('playbookVersion19'))))]", + "Incident-Trigger-Entity-Analyzer": "Incident-Trigger-Entity-Analyzer", + "_Incident-Trigger-Entity-Analyzer": "[variables('Incident-Trigger-Entity-Analyzer')]", + "playbookVersion20": "1.0", + "playbookContentId20": "Incident-Trigger-Entity-Analyzer", + "_playbookContentId20": "[variables('playbookContentId20')]", + "playbookId20": "[resourceId('Microsoft.Logic/workflows', variables('playbookContentId20'))]", + "playbookTemplateSpecName20": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-pl-',uniquestring(variables('_playbookContentId20'))))]", + "_playbookcontentProductId20": "[concat(take(variables('_solutionId'),50),'-','pl','-', uniqueString(concat(variables('_solutionId'),'-','Playbook','-',variables('_playbookContentId20'),'-', variables('playbookVersion20'))))]", + "Url-Trigger-Entity-Analyzer": "Url-Trigger-Entity-Analyzer", + "_Url-Trigger-Entity-Analyzer": "[variables('Url-Trigger-Entity-Analyzer')]", + "playbookVersion21": "1.0", + "playbookContentId21": "Url-Trigger-Entity-Analyzer", + "_playbookContentId21": "[variables('playbookContentId21')]", + "playbookId21": "[resourceId('Microsoft.Logic/workflows', variables('playbookContentId21'))]", + "playbookTemplateSpecName21": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-pl-',uniquestring(variables('_playbookContentId21'))))]", + "_playbookcontentProductId21": "[concat(take(variables('_solutionId'),50),'-','pl','-', 
uniqueString(concat(variables('_solutionId'),'-','Playbook','-',variables('_playbookContentId21'),'-', variables('playbookVersion21'))))]", "workbookVersion1": "2.0.0", "workbookContentId1": "AutomationHealth", "workbookId1": "[resourceId('Microsoft.Insights/workbooks', variables('workbookContentId1'))]", @@ -227,7 +251,7 @@ "workbookTemplateSpecName2": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-wb-',uniquestring(variables('_workbookContentId2'))))]", "_workbookContentId2": "[variables('workbookContentId2')]", "_workbookcontentProductId2": "[concat(take(variables('_solutionId'),50),'-','wb','-', uniqueString(concat(variables('_solutionId'),'-','Workbook','-',variables('_workbookContentId2'),'-', variables('workbookVersion2'))))]", - "workbookVersion3": "1.5.1", + "workbookVersion3": "1.5.2", "workbookContentId3": "SecurityOperationsEfficiency", "workbookId3": "[resourceId('Microsoft.Insights/workbooks', variables('workbookContentId3'))]", "workbookTemplateSpecName3": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat(parameters('workspace'),'-wb-',uniquestring(variables('_workbookContentId3'))))]", @@ -251,7 +275,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Sentinel_Incident_Assignment_Shifts Playbook with template version 3.0.3", + "description": "Sentinel_Incident_Assignment_Shifts Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion1')]", @@ -934,7 +958,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": 
"Notify-IncidentClosed Playbook with template version 3.0.3", + "description": "Notify-IncidentClosed Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion2')]", @@ -1311,7 +1335,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Notify-IncidentReopened Playbook with template version 3.0.3", + "description": "Notify-IncidentReopened Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion3')]", @@ -1680,7 +1704,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Notify-IncidentSeverityChanged Playbook with template version 3.0.3", + "description": "Notify-IncidentSeverityChanged Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion4')]", @@ -2045,7 +2069,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "updatetrigger-notifyOwner Playbook with template version 3.0.3", + "description": "updatetrigger-notifyOwner Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion5')]", @@ -2256,7 +2280,7 @@ 
"[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "PostMessageSlack-OnAlert Playbook with template version 3.0.3", + "description": "PostMessageSlack-OnAlert Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion6')]", @@ -2468,7 +2492,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "PostMessageTeams-OnAlert Playbook with template version 3.0.3", + "description": "PostMessageTeams-OnAlert Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion7')]", @@ -2709,7 +2733,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "PostMessageTeams Playbook with template version 3.0.3", + "description": "PostMessageTeams Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion8')]", @@ -2932,7 +2956,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "PostMessageSlack Playbook with template version 3.0.3", + "description": "PostMessageSlack Playbook with template version 3.0.4", "mainTemplate": { "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion9')]", @@ -3135,7 +3159,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "relateAlertsToIncident-basedOnIP Playbook with template version 3.0.3", + "description": "relateAlertsToIncident-basedOnIP Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion10')]", @@ -3517,7 +3541,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Send-basic-email Playbook with template version 3.0.3", + "description": "Send-basic-email Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion11')]", @@ -3771,7 +3795,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Send-email-with-formatted-incident-report Playbook with template version 3.0.3", + "description": "Send-email-with-formatted-incident-report Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion12')]", @@ -4065,7 +4089,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', 
variables('_solutionId'))]" ], "properties": { - "description": "CreateIncident-MicrosoftForm Playbook with template version 3.0.3", + "description": "CreateIncident-MicrosoftForm Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion13')]", @@ -4431,7 +4455,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "CreateIncident-SharedMailbox Playbook with template version 3.0.3", + "description": "CreateIncident-SharedMailbox Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion14')]", @@ -4809,7 +4833,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Defender_XDR_BEC_Playbook_for_SecOps-Tasks Playbook with template version 3.0.3", + "description": "Defender_XDR_BEC_Playbook_for_SecOps-Tasks Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion15')]", @@ -5284,7 +5308,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Defender_XDR_Phishing_Playbook_for_SecOps-Tasks Playbook with template version 3.0.3", + "description": "Defender_XDR_Phishing_Playbook_for_SecOps-Tasks Playbook with template version 3.0.4", "mainTemplate": { "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion16')]", @@ -5763,7 +5787,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Defender_XDR_Ransomware_Playbook_for_SecOps-Tasks Playbook with template version 3.0.3", + "description": "Defender_XDR_Ransomware_Playbook_for_SecOps-Tasks Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion17')]", @@ -6839,7 +6863,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "Send-Teams-adaptive-card-on-incident-creation Playbook with template version 3.0.3", + "description": "Send-Teams-adaptive-card-on-incident-creation Playbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('playbookVersion18')]", @@ -7281,6 +7305,1004 @@ "version": "[variables('playbookVersion18')]" } }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", + "apiVersion": "2023-04-01-preview", + "name": "[variables('playbookTemplateSpecName19')]", + "location": "[parameters('workspace-location')]", + "dependsOn": [ + "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" + ], + "properties": { + "description": "Http-Trigger-Entity-Analyzer Playbook with template version 3.0.4", + "mainTemplate": { + "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "[variables('playbookVersion19')]", + "parameters": { + "PlaybookName": { + "defaultValue": "Http-Trigger-Entity-Analyzer", + "type": "string", + "metadata": { + "description": "Name of the Logic App playbook" + } + }, + "workspaceId": { + "type": "string", + "metadata": { + "description": "Microsoft Sentinel workspace ID" + } + }, + "lookBackDays": { + "defaultValue": 40, + "type": "int", + "metadata": { + "description": "Number of days to look back for entity analysis" + } + } + }, + "variables": { + "SentinelMCPConnectionName": "[[concat('SentinelMCP-', parameters('PlaybookName'))]", + "connection-1": "[[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', variables('workspace-location-inline'), '/managedApis/sentinelmcp')]", + "_connection-1": "[[variables('connection-1')]", + "workspace-location-inline": "[concat('[resourceGroup().locatio', 'n]')]", + "workspace-name": "[parameters('workspace')]", + "workspaceResourceId": "[[resourceId('microsoft.OperationalInsights/Workspaces', variables('workspace-name'))]" + }, + "resources": [ + { + "type": "Microsoft.Web/connections", + "apiVersion": "2016-06-01", + "name": "[[variables('SentinelMCPConnectionName')]", + "location": "[[variables('workspace-location-inline')]", + "kind": "V1", + "properties": { + "displayName": "[[variables('SentinelMCPConnectionName')]", + "api": { + "id": "[[variables('_connection-1')]" + } + } + }, + { + "type": "Microsoft.Logic/workflows", + "apiVersion": "2017-07-01", + "name": "[[parameters('PlaybookName')]", + "location": "[[variables('workspace-location-inline')]", + "tags": { + "Created By": "ARM Template", + "hidden-SentinelWorkspaceId": "[[variables('workspaceResourceId')]" + }, + "dependsOn": [ + "[[resourceId('Microsoft.Web/connections', variables('SentinelMCPConnectionName'))]" + ], + "properties": { + "state": "Enabled", + 
"definition": { + "$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "$connections": { + "type": "Object" + } + }, + "triggers": { + "When_an_HTTP_request_is_received": { + "type": "Request", + "kind": "Http", + "inputs": { + "schema": { + "type": "object", + "properties": { + "Type": { + "type": "string" + }, + "Value": { + "type": "string" + } + } + } + } + } + }, + "actions": { + "Response_Accepted": { + "type": "Response", + "kind": "Http", + "inputs": { + "statusCode": 202, + "headers": { + "Retry-After": "10" + }, + "body": { + "status": "Accepted", + "message": "Entity analysis started. Processing in background.", + "runId": "@{workflow().run.name}", + "entityType": "@{triggerBody()?['Type']}", + "entityValue": "@{triggerBody()?['Value']}" + } + } + }, + "Condition": { + "actions": { + "User_Analyzer": { + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['sentinelmcp']['connectionId']" + } + }, + "method": "post", + "body": { + "workspaceId": "[[parameters('workspaceId')]", + "lookBackDays": "[[parameters('lookBackDays')]", + "properties": { + "entityType": "User", + "userId": "@{triggerBody()?['Value']}" + } + }, + "path": "/aiprimitives/analysis", + "queries": { + "api-version": "2025-08-01-preview" + } + } + }, + "Parse_JSON": { + "runAfter": { + "User_Analyzer": [ + "Succeeded" + ] + }, + "type": "ParseJson", + "inputs": { + "content": "@body('User_Analyzer')", + "schema": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "status": { + "type": "string" + }, + "classification": { + "type": "string" + }, + "analysis": { + "type": "string" + }, + "recommendation": { + "type": "string" + }, + "disclaimer": { + "type": "string" + }, + "dataSourceList": { + "type": "array", + "items": { + "type": "string" + } + }, + "properties": { + "type": "object", + 
"properties": { + "entityType": { + "type": "string" + } + } + } + } + } + } + } + }, + "runAfter": { + "Response_Accepted": [ + "Succeeded" + ] + }, + "else": { + "actions": { + "Url_Analyzer": { + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['sentinelmcp']['connectionId']" + } + }, + "method": "post", + "body": { + "workspaceId": "[[parameters('workspaceId')]", + "lookBackDays": "[[parameters('lookBackDays')]", + "properties": { + "entityType": "@{triggerBody()?['Type']}", + "url": "@{triggerBody()?['Value']}" + } + }, + "path": "/aiprimitives/analysis", + "queries": { + "api-version": "2025-08-01-preview" + } + } + }, + "Parse_JSON_1": { + "runAfter": { + "Url_Analyzer": [ + "Succeeded" + ] + }, + "type": "ParseJson", + "inputs": { + "content": "@body('Url_Analyzer')", + "schema": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "status": { + "type": "string" + }, + "classification": { + "type": "string" + }, + "analysis": { + "type": "string" + }, + "recommendation": { + "type": "string" + }, + "disclaimer": { + "type": "string" + }, + "dataSourceList": { + "type": "array", + "items": { + "type": "string" + } + }, + "properties": { + "type": "object", + "properties": { + "entityType": { + "type": "string" + } + } + } + } + } + } + } + } + }, + "expression": { + "and": [ + { + "equals": [ + "@triggerBody()?['Type']", + "User" + ] + } + ] + }, + "type": "If" + } + } + }, + "parameters": { + "$connections": { + "value": { + "sentinelmcp": { + "connectionId": "[[resourceId('Microsoft.Web/connections', variables('SentinelMCPConnectionName'))]", + "connectionName": "[[variables('SentinelMCPConnectionName')]", + "id": "[[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', variables('workspace-location-inline'), '/managedApis/sentinelmcp')]" + } + } + } + } + } + }, + { + "type": 
"Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2022-01-01-preview", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Playbook-', last(split(variables('playbookId19'),'/'))))]", + "properties": { + "parentId": "[variables('playbookId19')]", + "contentId": "[variables('_playbookContentId19')]", + "kind": "Playbook", + "version": "[variables('playbookVersion19')]", + "source": { + "kind": "Solution", + "name": "SentinelSOARessentials", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Microsoft", + "email": "[variables('_email')]" + }, + "support": { + "tier": "Microsoft", + "name": "Microsoft Corporation", + "email": "support@microsoft.com", + "link": "https://support.microsoft.com/" + } + } + } + ], + "metadata": { + "title": "HTTP Trigger Entity Analyzer", + "description": "This playbook is triggered by HTTP POST requests with entity information and performs automated investigation and enrichment of URL and User entities with asynchronous processing.", + "prerequisites": [ + "1. The user deploying this Logic App needs to have a Contributor Role.", + "2. The user has permissions to access Microsoft Sentinel workspace.", + "3. Microsoft Sentinel data connector lake must be enabled in your workspace for entity data collection.", + "4. The SentinelMCP connector is available in your environment.", + "5. You need to provide a valid Sentinel workspace ID during deployment.", + "6. Authentication support for the Entity Analyzer connection includes Entra ID Authentication (OAuth), Service Principal (Application ID and Secret), or Managed Identity (System-assigned or User-assigned)." + ], + "postDeployment": [ + "1. Authenticate the connections: Go to the Logic App → API connections and authenticate SentinelMCP connection with Microsoft Sentinel MCP permissions.", + "2. 
Authenticate the Entity Analyzer connection using one of the supported methods: Entra ID Auth, Service Principal, or Managed Identity.", + "3. Get the HTTP endpoint URL: Open the Logic App → Go to Logic app designer → Click on the HTTP trigger → Copy the HTTP POST URL.", + "4. The playbook will trigger when POST requests are sent to the HTTP endpoint." + ], + "lastUpdateTime": "2025-12-07T00:00:00Z", + "entities": [ + "URL", + "Account" + ], + "tags": [ + "Enrichment", + "Utilities", + "Entity Analysis", + "API Integration" + ], + "releaseNotes": { + "version": "1.0", + "title": "[variables('blanks')]", + "notes": [ + "Initial version" + ] + } + } + }, + "packageKind": "Solution", + "packageVersion": "[variables('_solutionVersion')]", + "packageName": "[variables('_solutionName')]", + "packageId": "[variables('_solutionId')]", + "contentSchemaVersion": "3.0.0", + "contentId": "[variables('_playbookContentId19')]", + "contentKind": "Playbook", + "displayName": "Http-Trigger-Entity-Analyzer", + "contentProductId": "[variables('_playbookcontentProductId19')]", + "id": "[variables('_playbookcontentProductId19')]", + "version": "[variables('playbookVersion19')]" + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", + "apiVersion": "2023-04-01-preview", + "name": "[variables('playbookTemplateSpecName20')]", + "location": "[parameters('workspace-location')]", + "dependsOn": [ + "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" + ], + "properties": { + "description": "Entity-Analyzer-Incident-Trigger Playbook with template version 3.0.4", + "mainTemplate": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "[variables('playbookVersion20')]", + "parameters": { + "PlaybookName": { + "defaultValue": "Entity-Analyzer-Incident-Trigger", + "type": 
"string" + }, + "lookBackDays": { + "defaultValue": 60, + "type": "int", + "metadata": { + "description": "Number of days to look back for entity analysis" + } + } + }, + "variables": { + "MicrosoftSentinelConnectionName": "[[concat('MicrosoftSentinel-', parameters('PlaybookName'))]", + "SentinelMCPConnectionName": "[[concat('SentinelMCP-', parameters('PlaybookName'))]", + "connection-2": "[[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', variables('workspace-location-inline'), '/managedApis/Azuresentinel')]", + "_connection-2": "[[variables('connection-2')]", + "connection-3": "[[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', variables('workspace-location-inline'), '/managedApis/sentinelmcp')]", + "_connection-3": "[[variables('connection-3')]", + "workspace-location-inline": "[concat('[resourceGroup().locatio', 'n]')]", + "workspace-name": "[parameters('workspace')]", + "workspaceResourceId": "[[resourceId('microsoft.OperationalInsights/Workspaces', variables('workspace-name'))]" + }, + "resources": [ + { + "type": "Microsoft.Logic/workflows", + "apiVersion": "2017-07-01", + "name": "[[parameters('PlaybookName')]", + "location": "[[variables('workspace-location-inline')]", + "identity": { + "type": "SystemAssigned" + }, + "tags": { + "Created By": "ARM Template", + "hidden-SentinelWorkspaceId": "[[variables('workspaceResourceId')]" + }, + "dependsOn": [ + "[[resourceId('Microsoft.Web/connections', variables('MicrosoftSentinelConnectionName'))]", + "[[resourceId('Microsoft.Web/connections', variables('SentinelMCPConnectionName'))]" + ], + "properties": { + "state": "Enabled", + "definition": { + "$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "$connections": { + "type": "Object" + } + }, + "triggers": { + "Microsoft_Sentinel_incident": { + "type": 
"ApiConnectionWebhook", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['azuresentinel']['connectionId']" + } + }, + "body": { + "callback_url": "@listCallbackUrl()" + }, + "path": "/incident-creation" + } + } + }, + "actions": { + "Entities_-_Get_URLs": { + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['azuresentinel']['connectionId']" + } + }, + "method": "post", + "body": "@triggerBody()?['object']?['properties']?['relatedEntities']", + "path": "/entities/url" + } + }, + "For_each_URL": { + "foreach": "@body('Entities_-_Get_URLs')?['URLs']", + "actions": { + "URL_Analyzer": { + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['sentinelmcp']['connectionId']" + } + }, + "method": "post", + "body": { + "workspaceId": "@triggerBody()?['workspaceId']", + "lookBackDays": "[[parameters('lookBackDays')]", + "properties": { + "entityType": "Url", + "url": "@{item()?['Url']}" + } + }, + "path": "/aiprimitives/analysis", + "queries": { + "api-version": "2025-08-01-preview" + } + } + }, + "Compose_Url": { + "runAfter": { + "URL_Analyzer": [ + "Succeeded" + ] + }, + "type": "Compose", + "inputs": "@concat(\r\n'🔗 **URL Analysis for: ', item()?['Url'], '**\\n\\n',\r\n'🏷️ **Classification**\\n\\n',\r\nbody('URL_Analyzer')?['classification'], '\\n\\n',\r\n'🔍 **Analysis Result**\\n\\n',\r\nbody('URL_Analyzer')?['analysis'], '\\n\\n',\r\n'✅ **Recommendation**\\n\\n',\r\nbody('URL_Analyzer')?['recommendation'], '\\n\\n',\r\n'⚠️ **Disclaimer**\\n\\n',\r\n'🤖 ', body('URL_Analyzer')?['disclaimer']\r\n)" + }, + "Add_Url_comment_to_incident": { + "runAfter": { + "Compose_Url": [ + "Succeeded" + ] + }, + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['azuresentinel']['connectionId']" + } + }, + "method": "post", + "body": { + "incidentArmId": 
"@triggerBody()?['object']?['id']", + "message": "

@{outputs('Compose_Url')}

" + }, + "path": "/Incidents/Comment" + } + } + }, + "runAfter": { + "Entities_-_Get_URLs": [ + "Succeeded" + ] + }, + "type": "Foreach" + }, + "Entities_-_Get_Accounts": { + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['azuresentinel']['connectionId']" + } + }, + "method": "post", + "body": "@triggerBody()?['object']?['properties']?['relatedEntities']", + "path": "/entities/account" + } + }, + "For_each_User": { + "foreach": "@body('Entities_-_Get_Accounts')?['Accounts']", + "actions": { + "User_Analyzer": { + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['sentinelmcp']['connectionId']" + } + }, + "method": "post", + "body": { + "workspaceId": "@triggerBody()?['workspaceId']", + "lookBackDays": "[[parameters('lookBackDays')]", + "properties": { + "entityType": "User", + "userId": "@{item()?['AadUserId']}" + } + }, + "path": "/aiprimitives/analysis", + "queries": { + "api-version": "2025-08-01-preview" + } + } + }, + "Compose_User": { + "runAfter": { + "User_Analyzer": [ + "Succeeded" + ] + }, + "type": "Compose", + "inputs": "@concat(\r\n'👤 **User Analysis for: ', item()?['Name'], '**\\n\\n',\r\n'🏷️ **Classification**\\n\\n',\r\nbody('User_Analyzer')?['classification'], '\\n\\n',\r\n'🔍 **Analysis Result**\\n\\n',\r\nbody('User_Analyzer')?['analysis'], '\\n\\n',\r\n'✅ **Recommendation**\\n\\n',\r\nbody('User_Analyzer')?['recommendation'], '\\n\\n',\r\n'⚠️ **Disclaimer**\\n\\n',\r\n'🤖 ', body('User_Analyzer')?['disclaimer']\r\n)" + }, + "Add_User_comment_to_incident": { + "runAfter": { + "Compose_User": [ + "Succeeded" + ] + }, + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['azuresentinel']['connectionId']" + } + }, + "method": "post", + "body": { + "incidentArmId": "@triggerBody()?['object']?['id']", + "message": "

@{outputs('Compose_User')}

" + }, + "path": "/Incidents/Comment" + } + } + }, + "runAfter": { + "Entities_-_Get_Accounts": [ + "Succeeded" + ] + }, + "type": "Foreach" + } + } + }, + "parameters": { + "$connections": { + "value": { + "azuresentinel": { + "connectionId": "[[resourceId('Microsoft.Web/connections', variables('MicrosoftSentinelConnectionName'))]", + "connectionName": "[[variables('MicrosoftSentinelConnectionName')]", + "id": "[[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', variables('workspace-location-inline'), '/managedApis/Azuresentinel')]", + "connectionProperties": { + "authentication": { + "type": "ManagedServiceIdentity" + } + } + }, + "sentinelmcp": { + "connectionId": "[[resourceId('Microsoft.Web/connections', variables('SentinelMCPConnectionName'))]", + "connectionName": "[[variables('SentinelMCPConnectionName')]", + "id": "[[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', variables('workspace-location-inline'), '/managedApis/sentinelmcp')]" + } + } + } + } + } + }, + { + "type": "Microsoft.Web/connections", + "apiVersion": "2016-06-01", + "name": "[[variables('MicrosoftSentinelConnectionName')]", + "location": "[[variables('workspace-location-inline')]", + "kind": "V1", + "properties": { + "displayName": "[[variables('MicrosoftSentinelConnectionName')]", + "parameterValueType": "Alternative", + "api": { + "id": "[[variables('_connection-2')]" + } + } + }, + { + "type": "Microsoft.Web/connections", + "apiVersion": "2016-06-01", + "name": "[[variables('SentinelMCPConnectionName')]", + "location": "[[variables('workspace-location-inline')]", + "kind": "V1", + "properties": { + "displayName": "[[variables('SentinelMCPConnectionName')]", + "api": { + "id": "[[variables('_connection-3')]" + } + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2022-01-01-preview", + "name": 
"[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Playbook-', last(split(variables('playbookId20'),'/'))))]", + "properties": { + "parentId": "[variables('playbookId20')]", + "contentId": "[variables('_playbookContentId20')]", + "kind": "Playbook", + "version": "[variables('playbookVersion20')]", + "source": { + "kind": "Solution", + "name": "SentinelSOARessentials", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Microsoft", + "email": "[variables('_email')]" + }, + "support": { + "tier": "Microsoft", + "name": "Microsoft Corporation", + "email": "support@microsoft.com", + "link": "https://support.microsoft.com/" + } + } + } + ], + "metadata": { + "title": "Incident Trigger Entity Analyzer", + "description": "This playbook is triggered by Microsoft Sentinel incidents and performs automated investigation and enrichment of URL and User entities associated with the incident.", + "prerequisites": [ + "1. A Microsoft Sentinel workspace must be configured.", + "2. The user deploying this Logic App needs to have a Contributor Role.", + "3. The user has permissions to access Microsoft Sentinel workspace.", + "4. The SentinelMCP connector is available in your environment." + ], + "postDeployment": [ + "1. Authenticate the connections: Go to the Logic App → API connections and authenticate Microsoft Sentinel connection with a user that has Sentinel permissions.", + "2. Authenticate the SentinelMCP connection with Microsoft Sentinel MCP permissions.", + "3. The playbook will automatically trigger when new incidents are created.", + "4. Consider creating an automation rule to run this playbook automatically on specific incident types." 
+ ], + "lastUpdateTime": "2025-12-07T00:00:00Z", + "entities": [ + "URL", + "Account" + ], + "tags": [ + "Enrichment", + "Utilities", + "Entity Analysis" + ], + "releaseNotes": { + "version": "1.0", + "title": "[variables('blanks')]", + "notes": [ + "Initial version" + ] + } + } + }, + "packageKind": "Solution", + "packageVersion": "[variables('_solutionVersion')]", + "packageName": "[variables('_solutionName')]", + "packageId": "[variables('_solutionId')]", + "contentSchemaVersion": "3.0.0", + "contentId": "[variables('_playbookContentId20')]", + "contentKind": "Playbook", + "displayName": "Entity-Analyzer-Incident-Trigger", + "contentProductId": "[variables('_playbookcontentProductId20')]", + "id": "[variables('_playbookcontentProductId20')]", + "version": "[variables('playbookVersion20')]" + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", + "apiVersion": "2023-04-01-preview", + "name": "[variables('playbookTemplateSpecName21')]", + "location": "[parameters('workspace-location')]", + "dependsOn": [ + "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" + ], + "properties": { + "description": "Entity-analyzer-Url-Trigger Playbook with template version 3.0.4", + "mainTemplate": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "[variables('playbookVersion21')]", + "parameters": { + "PlaybookName": { + "defaultValue": "Entity-analyzer-Url-Trigger", + "type": "string" + }, + "lookBackDays": { + "defaultValue": 10, + "type": "int", + "metadata": { + "description": "Number of days to look back for entity analysis" + } + }, + "workspaceId": { + "type": "string", + "metadata": { + "description": "The workspace ID for Microsoft Sentinel" + } + } + }, + "variables": { + "MicrosoftSentinelConnectionName": "[[concat('MicrosoftSentinel-', 
parameters('PlaybookName'))]", + "SentinelMCPConnectionName": "[[concat('SentinelMCP-', parameters('PlaybookName'))]", + "connection-2": "[[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', variables('workspace-location-inline'), '/managedApis/Azuresentinel')]", + "_connection-2": "[[variables('connection-2')]", + "connection-3": "[[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', variables('workspace-location-inline'), '/managedApis/sentinelmcp')]", + "_connection-3": "[[variables('connection-3')]", + "workspace-location-inline": "[concat('[resourceGroup().locatio', 'n]')]", + "workspace-name": "[parameters('workspace')]", + "workspaceResourceId": "[[resourceId('microsoft.OperationalInsights/Workspaces', variables('workspace-name'))]" + }, + "resources": [ + { + "type": "Microsoft.Logic/workflows", + "apiVersion": "2017-07-01", + "name": "[[parameters('PlaybookName')]", + "location": "[[variables('workspace-location-inline')]", + "identity": { + "type": "SystemAssigned" + }, + "tags": { + "Created By": "ARM Template", + "hidden-SentinelWorkspaceId": "[[variables('workspaceResourceId')]" + }, + "dependsOn": [ + "[[resourceId('Microsoft.Web/connections', variables('MicrosoftSentinelConnectionName'))]", + "[[resourceId('Microsoft.Web/connections', variables('SentinelMCPConnectionName'))]" + ], + "properties": { + "state": "Enabled", + "definition": { + "$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "$connections": { + "type": "Object" + } + }, + "triggers": { + "Microsoft_Sentinel_entity": { + "type": "ApiConnectionWebhook", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['azuresentinel']['connectionId']" + } + }, + "body": { + "callback_url": "@listCallbackUrl()" + }, + "path": 
"/entity/@{encodeURIComponent('UrlEntity')}" + } + } + }, + "actions": { + "URL_Analyzer": { + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['sentinelmcp']['connectionId']" + } + }, + "method": "post", + "body": { + "workspaceId": "[[parameters('workspaceId')]", + "lookBackDays": "[[parameters('lookBackDays')]", + "properties": { + "entityType": "Url", + "url": "@{triggerBody()?['Entity']?['properties']?['Url']}" + } + }, + "path": "/aiprimitives/analysis" + } + }, + "Compose": { + "runAfter": { + "URL_Analyzer": [ + "Succeeded" + ] + }, + "type": "Compose", + "inputs": "@concat(\r\n'| 🔍 **Section** | Details |\\n',\r\n'|---|---|\\n',\r\n'| 🏷️ **Classification** | ', replace(replace(replace(coalesce(body('URL_Analyzer')?['classification'], 'N/A'), '\\r\\n', '\\n'), '\\n', '
'), '|', '\\\\|'), ' |\\n',\r\n'| 🕵️ **Entity Type** | ', replace(replace(replace(coalesce(body('URL_Analyzer')?['properties']?['entityType'], 'N/A'), '\\r\\n', '\\n'), '\\n', '
'), '|', '\\\\|'), ' |\\n',\r\n'| 🔎 **Analysis Result** | ', replace(replace(replace(replace(coalesce(body('URL_Analyzer')?['analysis'], 'N/A'), '- ', '• '), '\\r\\n', '\\n'), '\\n', '
'), '|', '\\\\|'), ' |\\n',\r\n'| ✅ **Recommendation** | ', replace(replace(replace(coalesce(body('URL_Analyzer')?['recommendation'], 'N/A'), '\\r\\n', '\\n'), '\\n', '
'), '|', '\\\\|'), ' |\\n',\r\n'| ⚠️ **Disclaimer** | 🤖 ', replace(replace(replace(coalesce(body('URL_Analyzer')?['disclaimer'], 'N/A'), '\\r\\n', '\\n'), '\\n', '
'), '|', '\\\\|'), ' |\\n',\r\n'| 📂 **Data Sources** | ', if(equals(empty(body('URL_Analyzer')?['dataSourceList']), true), 'N/A', concat('• ', replace(join(body('URL_Analyzer')?['dataSourceList'], '\\n• '), '\\n', '
'))), ' |'\r\n)" + }, + "Add_comment_to_incident_(V3)": { + "runAfter": { + "Compose": [ + "Succeeded" + ] + }, + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['azuresentinel']['connectionId']" + } + }, + "method": "post", + "body": { + "incidentArmId": "@triggerBody()?['IncidentArmID']", + "message": "

@{outputs('Compose')}

" + }, + "path": "/Incidents/Comment" + } + } + } + }, + "parameters": { + "$connections": { + "value": { + "azuresentinel": { + "connectionId": "[[resourceId('Microsoft.Web/connections', variables('MicrosoftSentinelConnectionName'))]", + "connectionName": "[[variables('MicrosoftSentinelConnectionName')]", + "id": "[[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', variables('workspace-location-inline'), '/managedApis/Azuresentinel')]", + "connectionProperties": { + "authentication": { + "type": "ManagedServiceIdentity" + } + } + }, + "sentinelmcp": { + "connectionId": "[[resourceId('Microsoft.Web/connections', variables('SentinelMCPConnectionName'))]", + "connectionName": "[[variables('SentinelMCPConnectionName')]", + "id": "[[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', variables('workspace-location-inline'), '/managedApis/sentinelmcp')]" + } + } + } + } + } + }, + { + "type": "Microsoft.Web/connections", + "apiVersion": "2016-06-01", + "name": "[[variables('MicrosoftSentinelConnectionName')]", + "location": "[[variables('workspace-location-inline')]", + "kind": "V1", + "properties": { + "displayName": "[[variables('MicrosoftSentinelConnectionName')]", + "parameterValueType": "Alternative", + "api": { + "id": "[[variables('_connection-2')]" + } + } + }, + { + "type": "Microsoft.Web/connections", + "apiVersion": "2016-06-01", + "name": "[[variables('SentinelMCPConnectionName')]", + "location": "[[variables('workspace-location-inline')]", + "kind": "V1", + "properties": { + "displayName": "[[variables('SentinelMCPConnectionName')]", + "api": { + "id": "[[variables('_connection-3')]" + } + } + }, + { + "type": "Microsoft.OperationalInsights/workspaces/providers/metadata", + "apiVersion": "2022-01-01-preview", + "name": "[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Playbook-', last(split(variables('playbookId21'),'/'))))]", + "properties": { + 
"parentId": "[variables('playbookId21')]", + "contentId": "[variables('_playbookContentId21')]", + "kind": "Playbook", + "version": "[variables('playbookVersion21')]", + "source": { + "kind": "Solution", + "name": "SentinelSOARessentials", + "sourceId": "[variables('_solutionId')]" + }, + "author": { + "name": "Microsoft", + "email": "[variables('_email')]" + }, + "support": { + "tier": "Microsoft", + "name": "Microsoft Corporation", + "email": "support@microsoft.com", + "link": "https://support.microsoft.com/" + } + } + } + ], + "metadata": { + "title": "URL Entity Trigger Analyzer", + "description": "This playbook is triggered manually when a URL entity is selected in a Microsoft Sentinel incident and provides detailed security insights including classification, analysis results, and recommendations.", + "prerequisites": [ + "1. The user deploying this Logic App needs to have a Contributor Role.", + "2. The user has permissions to access Microsoft Sentinel workspace.", + "3. You have the Workspace ID for your Sentinel environment.", + "4. The SentinelMCP connector is available in your environment.", + "5. Access to Microsoft Sentinel portal in Azure (not Defender portal)." + ], + "postDeployment": [ + "1. Authenticate the connections: Go to the Logic App → API connections and authenticate Microsoft Sentinel connection with a user that has Sentinel permissions.", + "2. Authenticate the SentinelMCP connection with Microsoft Sentinel MCP permissions.", + "3. The playbook will be available to run manually from incident entities.", + "4. Results will be automatically added as comments to the relevant incidents." 
+ ], + "lastUpdateTime": "2025-12-07T00:00:00Z", + "entities": [ + "URL" + ], + "tags": [ + "Enrichment", + "Utilities", + "Entity Analysis" + ], + "releaseNotes": { + "version": "1.0", + "title": "[variables('blanks')]", + "notes": [ + "Initial version" + ] + } + } + }, + "packageKind": "Solution", + "packageVersion": "[variables('_solutionVersion')]", + "packageName": "[variables('_solutionName')]", + "packageId": "[variables('_solutionId')]", + "contentSchemaVersion": "3.0.0", + "contentId": "[variables('_playbookContentId21')]", + "contentKind": "Playbook", + "displayName": "Entity-analyzer-Url-Trigger", + "contentProductId": "[variables('_playbookcontentProductId21')]", + "id": "[variables('_playbookcontentProductId21')]", + "version": "[variables('playbookVersion21')]" + } + }, { "type": "Microsoft.OperationalInsights/workspaces/providers/contentTemplates", "apiVersion": "2023-04-01-preview", @@ -7290,7 +8312,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "AutomationHealth Workbook with template version 3.0.3", + "description": "AutomationHealth Workbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('workbookVersion1')]", @@ -7374,7 +8396,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "IncidentOverview Workbook with template version 3.0.3", + "description": "IncidentOverview Workbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('workbookVersion2')]", @@ -7392,7 +8414,7 @@ }, 
"properties": { "displayName": "[parameters('workbook2-name')]", - "serializedData": "{\"version\":\"Notebook/1.0\",\"items\":[{\"type\":1,\"content\":{\"json\":\"# Incident Overview\"},\"customWidth\":\"35\",\"name\":\"Headline\"},{\"type\":9,\"content\":{\"version\":\"KqlParameterItem/1.0\",\"parameters\":[{\"id\":\"9a199167-2dde-49dd-8f01-23e9d1fa8151\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"InternalWSs\",\"type\":1,\"isRequired\":true,\"query\":\"SecurityIncident\\r\\n| take 1\\r\\n| parse IncidentUrl with * \\\"/workspaces/\\\" Workspace \\\"/\\\" *\\r\\n| project Workspace\",\"isHiddenWhenLocked\":true,\"timeContext\":{\"durationMs\":2592000000},\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"7806fefd-432f-4828-9756-8c0be5c08d07\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"InternalSub\",\"type\":1,\"isRequired\":true,\"query\":\"SecurityIncident\\r\\n| take 1\\r\\n| parse IncidentUrl with * \\\"/subscriptions/\\\" subscriptions \\\"/\\\" *\\r\\n| project subscriptions\",\"isHiddenWhenLocked\":true,\"timeContext\":{\"durationMs\":2592000000},\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"55d3ab63-6e1f-4d02-8d9e-2225526689c7\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"Subscription\",\"type\":6,\"isRequired\":true,\"query\":\"summarize by subscriptionId\\r\\n| project subscriptionId, Subscription=strcat(\\\"/subscriptions/\\\", subscriptionId)\\r\\n| extend selected = iff(subscriptionId =~ '{InternalSub}', true, 
false)\\r\\n\",\"crossComponentResources\":[\"value::all\"],\"typeSettings\":{\"showDefault\":false},\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":1,\"resourceType\":\"microsoft.resourcegraph/resources\",\"value\":\"\"},{\"id\":\"95a45501-31b5-4ea2-bcb3-eb208e0080e2\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"Workspace\",\"type\":5,\"isRequired\":true,\"multiSelect\":true,\"quote\":\"'\",\"delimiter\":\",\",\"query\":\"//resources | where type =~ 'Microsoft.operationsmanagement/solutions' | where name contains //'SecurityInsights' | project id //= tostring(properties.workspaceResourceId)\\r\\n\\r\\nwhere type =~ 'microsoft.operationalinsights/workspaces'\\r\\n| project value =id, label = name, selected = iff(name =~ '{InternalWSs}', true, false)\\r\\n\\r\\n\\r\\n\",\"crossComponentResources\":[\"value::all\"],\"typeSettings\":{\"showDefault\":false},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":1,\"resourceType\":\"microsoft.resourcegraph/resources\"},{\"id\":\"7d597ad7-4a2a-45ed-a4fe-7ee32de0fc22\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"TimeRange\",\"label\":\"Incident Creation Time\",\"type\":4,\"isRequired\":true,\"value\":{\"durationMs\":2592000000},\"typeSettings\":{\"selectableValues\":[{\"durationMs\":14400000},{\"durationMs\":43200000},{\"durationMs\":86400000},{\"durationMs\":172800000},{\"durationMs\":259200000},{\"durationMs\":604800000},{\"durationMs\":1209600000},{\"durationMs\":2592000000}],\"allowCustom\":true}},{\"id\":\"3a87d4f7-42cc-4c62-b543-6b5d9ab8cf27\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"Severity\",\"type\":2,\"isRequired\":true,\"multiSelect\":true,\"quote\":\"'\",\"delimiter\":\",\",\"query\":\"SecurityIncident\\r\\n| summarize Count = count(IncidentNumber) by Severity\\r\\n| project Value = Severity, Label = strcat(Severity, \\\": \\\", 
Count)\",\"value\":[\"value::all\"],\"typeSettings\":{\"additionalResourceOptions\":[\"value::all\"],\"selectAllValue\":\"*\"},\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"81085d3a-5aca-488e-b7c6-ecf1167e59f7\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"Tactics\",\"type\":2,\"isRequired\":true,\"multiSelect\":true,\"quote\":\"'\",\"delimiter\":\",\",\"query\":\"SecurityIncident\\r\\n| extend Tactics = todynamic(AdditionalData.tactics)\\r\\n| mv-expand Tactics to typeof(string)\\r\\n| summarize Count=count(IncidentNumber) by Tactics\\r\\n| project Value = Tactics, Label = strcat(Tactics, \\\": \\\", Count)\",\"value\":[\"value::all\"],\"typeSettings\":{\"additionalResourceOptions\":[\"value::all\"],\"selectAllValue\":\"*\"},\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"0f9efb0d-ac34-41d0-8a19-165840eb2a71\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"Owner\",\"type\":2,\"isRequired\":true,\"multiSelect\":true,\"quote\":\"'\",\"delimiter\":\",\",\"query\":\"SecurityIncident\\r\\n| extend owner = tostring(Owner.assignedTo) \\r\\n| summarize Count=count(IncidentNumber) by Owner= case(owner==\\\"\\\", \\\"Unassigned\\\",owner)\\r\\n| project Value = Owner, Label = strcat(Owner, \\\": \\\", Count)\",\"value\":[\"value::all\"],\"typeSettings\":{\"additionalResourceOptions\":[\"value::all\"],\"selectAllValue\":\"*\"},\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"cf86113b-59ad-4fc9-aeb7-9b44e230641e\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"Product\",\"label\":\"Product Name\",\"type\":2,\"isRequired\":true,\"multiSelect\":true,\"quote\":\"'\",\"delimiter\":\",\",\"query\":\"SecurityIncident\\r\\n| 
extend Product = tostring(parse_json(tostring(AdditionalData.alertProductNames))[0]) \\r\\n| summarize Count=count(IncidentNumber) by Product\\r\\n| project Value = Product, Label = strcat(Product, \\\": \\\", Count)\\r\\n\",\"value\":[\"value::all\"],\"typeSettings\":{\"additionalResourceOptions\":[\"value::all\"],\"selectAllValue\":\"*\"},\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"1fea48e7-99b2-4664-8eb6-bd35fc4efaf0\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"resourceGroup\",\"type\":1,\"query\":\"resources\\r\\n| where type =~ 'microsoft.operationalinsights/workspaces'\\r\\n| where id == \\\"{Workspace:lable}\\\"\\r\\n| project resourceGroup\",\"crossComponentResources\":[\"{Subscription}\"],\"isHiddenWhenLocked\":true,\"queryType\":1,\"resourceType\":\"microsoft.resourcegraph/resources\"},{\"id\":\"2908f26a-6238-43ed-9aa0-546c9041d918\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"Help\",\"label\":\"Show Help\",\"type\":10,\"isRequired\":true,\"jsonData\":\"[{ \\\"value\\\": \\\"Yes\\\", \\\"label\\\": \\\"Yes\\\"},\\r\\n {\\\"value\\\": \\\"No\\\", \\\"label\\\": \\\"No\\\", \\\"selected\\\":true }]\",\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\"}],\"style\":\"above\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},\"customWidth\":\"100\",\"name\":\"parameters - 6\"},{\"type\":1,\"content\":{\"json\":\"The Incident Overview workbook is designed to assist in triaging and investigation by providing in-depth information about the incident, including:\\r\\n* General information\\r\\n* Entity data\\r\\n* Triage time (time between incident creation and first response)\\r\\n* Mitigation time (time between incident creation and closing)\\r\\n* Comments\\r\\n* Remediation information from the Alerts or from a Watchlist - setup readme: 
https://github.com/Azure/Azure-Sentinel/wiki/SOC-Process-Framework\\r\\n\\r\\nCustomize this workbook by saving and editing it. \\r\\nYou can reach this workbook template from the incidents panel as well. Once you have customized it, the link from the incident panel will open the customized workbook instead of the template.\",\"style\":\"info\"},\"conditionalVisibility\":{\"parameterName\":\"Help\",\"comparison\":\"isEqualTo\",\"value\":\"Yes\"},\"customWidth\":\"100\",\"name\":\"Info\"},{\"type\":9,\"content\":{\"version\":\"KqlParameterItem/1.0\",\"crossComponentResources\":[\"{Workspace}\"],\"parameters\":[{\"id\":\"9aec751b-07bd-43ba-80b9-f711887dce45\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"IncidentNumber\",\"label\":\"Incident Number\",\"type\":1,\"isRequired\":true,\"value\":\"\",\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\"},{\"id\":\"9ef1a34d-5c8e-42ad-b1d7-1353e0091060\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"testRemediation\",\"type\":1,\"isRequired\":true,\"query\":\"SecurityIncident\\r\\n| where IncidentNumber == '{IncidentNumber:value}' \\r\\n| summarize arg_max(LastModifiedTime,*) by tostring(IncidentNumber)\\r\\n| extend Alerts = extract(\\\"\\\\\\\\[(.*?)\\\\\\\\]\\\", 1, tostring(AlertIds))\\r\\n| mv-expand AlertIds to typeof(string)\\r\\n| join \\r\\n(\\r\\n SecurityAlert\\r\\n | extend Remediation_ = parse_json(RemediationSteps)\\r\\n | mv-expand Remediation_\\r\\n) on $left.AlertIds == $right.SystemAlertId\\r\\n| summarize Remediation=make_set(tostring(Remediation_)) by IncidentNumber, Title, Severity\\r\\n| mv-expand Remediation to typeof(string)\\r\\n| project 
value=iif(isempty(Remediation),'0','1')\",\"crossComponentResources\":[\"{Workspace}\"],\"isHiddenWhenLocked\":true,\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"e5d4131c-43a9-4f92-87c9-dbf647530c9c\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"watchListExists\",\"type\":1,\"isRequired\":true,\"query\":\"_GetWatchlist('SocRA')\\r\\n| limit 1\",\"crossComponentResources\":[\"{Workspace}\"],\"isHiddenWhenLocked\":true,\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"}],\"style\":\"pills\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},\"customWidth\":\"50\",\"name\":\"parameters - 6 - Copy\"},{\"type\":9,\"content\":{\"version\":\"KqlParameterItem/1.0\",\"crossComponentResources\":[\"{Workspace}\"],\"parameters\":[{\"id\":\"f978edb2-9886-4bff-8e12-8280800321c3\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"IncidentID\",\"label\":\"Incident Name\",\"type\":1,\"query\":\"SecurityIncident\\r\\n| where IncidentNumber == {IncidentNumber}\\r\\n| take 1\\r\\n| project IncidentName\\r\\n\",\"crossComponentResources\":[\"{Workspace}\"],\"isHiddenWhenLocked\":true,\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"3b8e6cdd-4578-49cb-a515-1f9dec104fd7\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"RuleId\",\"label\":\"Rule Id\",\"type\":1,\"query\":\"SecurityIncident\\r\\n| where IncidentNumber == {IncidentNumber}\\r\\n| summarize arg_max(TimeGenerated, RelatedAnalyticRuleIds) by IncidentNumber\\r\\n| project 
RelatedAnalyticRuleIds\",\"crossComponentResources\":[\"{Workspace}\"],\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"}],\"style\":\"pills\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},\"conditionalVisibility\":{\"parameterName\":\"IncidentNumber\",\"comparison\":\"isEqualTo\",\"value\":\"e\"},\"customWidth\":\"50\",\"name\":\"Invisible parameters\"},{\"type\":1,\"content\":{\"json\":\"## General Incident Information \"},\"customWidth\":\"67\",\"name\":\"Headline - general info\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"let incidentNumberToCheck = '{IncidentNumber}';\\r\\nlet incidentWithNoAlertsQuery = SecurityIncident\\r\\n| where IncidentNumber == incidentNumberToCheck\\r\\n| summarize arg_max(TimeGenerated,CreatedTime,Status, Severity, Owner, AdditionalData, IncidentUrl, Comments, Classification,ClassificationReason, ClassificationComment,Labels, Title, AlertIds) by IncidentNumber\\r\\n| where array_length(AlertIds) == 0\\r\\n| where Severity in ({Severity}) or '{Severity:label}' == \\\"All\\\"\\r\\n| extend Tactics = todynamic(AdditionalData.tactics)\\r\\n| extend Owner = todynamic(Owner.assignedTo), IncidentCreated = format_datetime(CreatedTime,'yy-MM-dd HH:mm')\\r\\n| where Owner in ({Owner}) or '{Owner:label}' == \\\"All\\\"\\r\\n| extend Product = todynamic((parse_json(tostring(AdditionalData.alertProductNames))[0]))\\r\\n| where Product in ({Product}) or '{Product:label}' == \\\"All\\\"\\r\\n| extend Tags = extract_all('labelName\\\":\\\"(.*?)\\\"',tostring(Labels))\\r\\n| extend Owner = case(tostring(Owner)==\\\"\\\", \\\"Unassigned\\\",tostring(Owner)), Products = strcat_array(AdditionalData.alertProductNames, \\\", \\\"), Alerts = tostring(AdditionalData.alertsCount), Bookmarks = tostring(AdditionalData.bookmarksCount), Comments = tostring(AdditionalData.commentsCount), Tactics = 
strcat_array(AdditionalData.tactics, \\\", \\\"), Labels = strcat_array(Tags, \\\", \\\")\\r\\n;\\r\\nlet incidentWithAlertsQuery = SecurityIncident\\r\\n| where IncidentNumber == incidentNumberToCheck\\r\\n| summarize arg_max(TimeGenerated,CreatedTime,Status, Severity, Owner, AdditionalData, IncidentUrl, Comments, Classification,ClassificationReason, ClassificationComment,Labels, Title, AlertIds) by IncidentNumber\\r\\n| where array_length(AlertIds) > 0\\r\\n| where Severity in ({Severity}) or '{Severity:label}' == \\\"All\\\"\\r\\n| extend Tactics = todynamic(AdditionalData.tactics)\\r\\n| extend Owner = todynamic(Owner.assignedTo), IncidentCreated = format_datetime(CreatedTime,'yy-MM-dd HH:mm')\\r\\n| where Owner in ({Owner}) or '{Owner:label}' == \\\"All\\\"\\r\\n| extend Product = todynamic((parse_json(tostring(AdditionalData.alertProductNames))[0]))\\r\\n| where Product in ({Product}) or '{Product:label}' == \\\"All\\\"\\r\\n| extend Tags = extract_all('labelName\\\":\\\"(.*?)\\\"',tostring(Labels))\\r\\n| extend Owner = case(tostring(Owner)==\\\"\\\", \\\"Unassigned\\\",tostring(Owner)), Products = strcat_array(AdditionalData.alertProductNames, \\\", \\\"), Alerts = tostring(AdditionalData.alertsCount), Bookmarks = tostring(AdditionalData.bookmarksCount), Comments = tostring(AdditionalData.commentsCount), Tactics = strcat_array(AdditionalData.tactics, \\\", \\\"), Labels = strcat_array(Tags, \\\", \\\")\\r\\n| mv-expand AlertIds to typeof(string)\\r\\n| join kind=leftouter\\r\\n(SecurityAlert\\r\\n| summarize arg_max(TimeGenerated,AlertName, Description, AlertType, Entities) by SystemAlertId) on $left.AlertIds == $right.SystemAlertId\\r\\n| summarize AlertName = makelist(AlertName), AlertType = makelist(AlertType) by Comments, Labels, Title, Products, AlertsCount = Alerts, Bookmarks, Status, Severity, Owner, IncidentCreated, ClassificationComment, Classification, ClassificationReason\\r\\n| extend AlertNames = strcat_array(AlertName, \\\", \\\"), AlertTypes 
= strcat_array(AlertType, \\\", \\\")\\r\\n;\\r\\nincidentWithNoAlertsQuery\\r\\n| union incidentWithAlertsQuery\\r\\n| project packed = pack_all()\\r\\n| mv-expand packed\\r\\n| parse tostring(packed) with * '\\\"' Field '\\\":\\\"' Value '\\\"}'\\r\\n| where Field in ('Severity', 'Owner','Status', 'AlertsCount','Products','Title', 'IncidentCreated', 'Labels','Bookmarks', 'AlertNames', 'AlertsType', 'Classification','ClassificationComment','ClassificationReason')\\r\\n| extend Field1 = case(Field== \\\"IncidentCreated\\\", \\\"Time created\\\", Field == \\\"AlertsCount\\\", \\\"Alert count\\\", Field == \\\"ClassificationComment\\\", \\\"Classification Comment\\\", Field == \\\"ClassificationReason\\\", \\\"Classification Reason\\\", Field == \\\"AlertNames\\\", \\\"Alert Names\\\", Field)\\r\\n| extend Order = case(Field==\\\"Title\\\", 1,Field==\\\"IncidentCreated\\\", 2,Field==\\\"Severity\\\", 3,Field==\\\"Status\\\", 4,Field==\\\"Owner\\\", 5,Field==\\\"Products\\\", 6,Field==\\\"AlertsType\\\",6,Field==\\\"AlertsCount\\\", 7,Field==\\\"Bookmarks\\\", 8, Field==\\\"Labels\\\", 9,Field==\\\"Classification\\\", 10,Field==\\\"ClassificationReason\\\",11, 100)\",\"size\":0,\"noDataMessage\":\"Enter an incident 
number\",\"noDataMessageStyle\":5,\"timeContext\":{\"durationMs\":2592000000},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"crossComponentResources\":[\"{Workspace}\"],\"visualization\":\"tiles\",\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"Field1\",\"formatter\":1,\"numberFormat\":{\"unit\":0,\"options\":{\"style\":\"decimal\",\"useGrouping\":false}}},\"leftContent\":{\"columnMatch\":\"Value\",\"formatter\":18,\"formatOptions\":{\"thresholdsOptions\":\"colors\",\"thresholdsGrid\":[{\"operator\":\"==\",\"thresholdValue\":\"Low\",\"representation\":\"green\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Medium\",\"representation\":\"orange\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"High\",\"representation\":\"redBright\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"New\",\"representation\":\"blue\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Active\",\"representation\":\"lightBlue\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Closed\",\"representation\":\"gray\",\"text\":\"{0}{1}\"},{\"operator\":\"Default\",\"text\":\"{0}{1}\"}]}},\"secondaryContent\":{\"columnMatch\":\"Remediation_\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\"}},\"showBorder\":false,\"sortCriteriaField\":\"Order\",\"sortOrderField\":1,\"size\":\"auto\"}},\"customWidth\":\"67\",\"name\":\"general info\"},{\"type\":12,\"content\":{\"version\":\"NotebookGroup/1.0\",\"groupType\":\"editable\",\"items\":[{\"type\":1,\"content\":{\"json\":\"## Closing Classifications of Similar Incidents\"},\"name\":\"Headline - classification\"},{\"type\":1,\"content\":{\"json\":\"Closing classifications of incidents that where created from the same rule in the past month\",\"style\":\"info\"},\"conditionalVisibility\":{\"parameterName\":\"Help\",\"comparison\":\"isEqualTo\",\"value\":\"Yes\"},\"name\":\"Info - 
Copy\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"let alertText = strcat_array(dynamic([{RuleId}]),\\\",\\\");\\r\\nlet getAmountOfIncidentForRuleId = (classification:string){\\r\\n SecurityIncident\\r\\n | where TimeGenerated >= ago(30d)\\r\\n | where Classification == classification\\r\\n | mv-expand AlertId=AlertIds\\r\\n | extend AlertId=tostring(AlertId)\\r\\n | join (SecurityAlert| where TimeGenerated >=ago(30d)) on $left.AlertId==$right.SystemAlertId\\r\\n | mv-expand RuleId=RelatedAnalyticRuleIds\\r\\n | extend RuleId=iff(ProductName!= 'Azure Sentinel', ProductName,RuleId)\\r\\n | summarize counter=count() by RuleIdentifier=tostring(RuleId)\\r\\n | extend RuleId=RuleIdentifier\\r\\n | project-away RuleIdentifier\\r\\n};\\r\\nlet falsePositiveClassificationTable = getAmountOfIncidentForRuleId(\\\"FalsePositive\\\") | extend FalsePositiveCounter=counter | project-away counter;\\r\\nlet undeterminedClassificationTable = getAmountOfIncidentForRuleId(\\\"Undetermined\\\") | extend UndeterminedCounter=counter | project-away counter;\\r\\nlet benignPositiveClassificationTable = getAmountOfIncidentForRuleId(\\\"BenignPositive\\\") | extend BenignPositiveCounter=counter | project-away counter;\\r\\nlet truePositiveClassificationTable = getAmountOfIncidentForRuleId(\\\"TruePositive\\\") | extend TruePositiveCounter=counter | project-away counter;\\r\\nlet closedIncidentTable = SecurityIncident| where TimeGenerated >= ago(30d) |where Status == \\\"Closed\\\" | mv-expand AlertId=AlertIds| extend AlertId=tostring(AlertId)| join SecurityAlert on $left.AlertId==$right.SystemAlertId| mv-expand RelatedAnalyticRuleIds| extend RuleId= iff(ProductName == 'Azure Sentinel', tostring(RelatedAnalyticRuleIds), ProductName);\\r\\nlet joinByRuleId = (T:(RuleId:string), S:(RuleId:string)){\\r\\n T \\r\\n | join kind=fullouter S on $left.RuleId == $right.RuleId\\r\\n | extend RuleId= iff(RuleId == '', RuleId1,RuleId)\\r\\n | project-away 
RuleId1\\r\\n};\\r\\njoinByRuleId(joinByRuleId(joinByRuleId(joinByRuleId(falsePositiveClassificationTable, undeterminedClassificationTable) , benignPositiveClassificationTable), truePositiveClassificationTable),closedIncidentTable)\\r\\n| join kind=leftouter (SecurityAlert\\r\\n| where TimeGenerated >= ago(30d)\\r\\n| where ProductName == 'Azure Sentinel'\\r\\n| extend RuleId = parsejson( tostring(todynamic(ExtendedProperties)['Analytic Rule Ids']))\\r\\n| mv-expand RuleId=RuleId\\r\\n| extend RuleId=tostring(RuleId)\\r\\n| extend RuleName= tostring(todynamic(ExtendedProperties)['Analytic Rule Name'])\\r\\n| project RuleId,RuleName\\r\\n| distinct RuleId,RuleName)\\r\\n on $left.RuleId==$right.RuleId\\r\\n| extend RuleName=iff(isempty(RuleName),RuleId,RuleName)\\r\\n| project-away RuleId1\\r\\n| where alertText has RuleId \\r\\n| summarize dcount(IncidentNumber) by Classification\",\"size\":0,\"noDataMessage\":\"No recent closed incident were found\",\"noDataMessageStyle\":4,\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"crossComponentResources\":[\"{Workspace}\"],\"visualization\":\"piechart\",\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"Field1\",\"formatter\":1,\"numberFormat\":{\"unit\":0,\"options\":{\"style\":\"decimal\"}}},\"leftContent\":{\"columnMatch\":\"Value\",\"formatter\":1},\"showBorder\":false,\"sortCriteriaField\":\"Order\",\"sortOrderField\":1,\"size\":\"auto\"}},\"name\":\"Closing classification\"}]},\"customWidth\":\"33\",\"name\":\"Similar\"},{\"type\":12,\"content\":{\"version\":\"NotebookGroup/1.0\",\"groupType\":\"editable\",\"title\":\"⚠️ Remediations - click to Open\",\"expandable\":true,\"expanded\":true,\"items\":[{\"type\":1,\"content\":{\"json\":\"## Recommended Actions\"},\"name\":\"text - 15\"},{\"type\":1,\"content\":{\"json\":\"### Remediations and Actions Help\\r\\nIn this section of the Workbook, which only is visiable if an Alert has remediation entries, the default Remediations that are 
contained in the Alert data will be shown (Basic view). \\r\\nNote, not all Alerts have this data. \\r\\nHowever you can provide you own set of Alerts mapped to the Alert \\\"Title\\\". This enhanced feature, uses a Watchlist which has an alias name of: SocRA when you import it (Advanced view).\\r\\n\\r\\n \\r\\n### WatchList Instructions\\r\\n\\r\\n* You must download the Watchlist file called:\\r\\n### SOCAnalystActionsByAlert.csv \\r\\n\\r\\nIcon-security-248(https://github.com/Azure/Azure-Sentinel/blob/master/docs/SOCAnalystActionsByAlert.csv)\\r\\n \\r\\n * Name the Watchlist alias as: \\r\\n ### SocRA \\r\\n * Note: SocRA is case sensitive, you need an uppercase S, R and A.\",\"style\":\"info\"},\"conditionalVisibility\":{\"parameterName\":\"Help\",\"comparison\":\"isEqualTo\",\"value\":\"Yes\"},\"name\":\"text - ra Help text\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"SecurityIncident\\r\\n| where IncidentNumber == '{IncidentNumber:value}' \\r\\n| summarize arg_max(LastModifiedTime,*) by tostring(IncidentNumber)\\r\\n| extend Alerts = extract(\\\"\\\\\\\\[(.*?)\\\\\\\\]\\\", 1, tostring(AlertIds))\\r\\n| mv-expand AlertIds to typeof(string)\\r\\n| join \\r\\n(\\r\\n SecurityAlert\\r\\n | extend Remediation_ = parse_json(RemediationSteps)\\r\\n | mv-expand Remediation_\\r\\n) on $left.AlertIds == $right.SystemAlertId\\r\\n| summarize Remediation=make_set(tostring(Remediation_)) by IncidentNumber, Title, Severity\\r\\n| mv-expand Remediation to typeof(string)\\r\\n// extract URL from the string \\r\\n| extend url_ = iif(Remediation contains 'https://',extract (\\\"https://([a-zA-Z0-9-_://@.?%=&# +]*)\\\",0,tostring(Remediation)),\\\"\\\")\\r\\n| serialize\\r\\n| extend IncidentNumber = iif(prev(IncidentNumber) == IncidentNumber,'',IncidentNumber), Title = iif(prev(Title) == Title,'',Title)\\r\\n\",\"size\":1,\"title\":\"Incident and Remediations - Basic View (from Alert) 
\",\"timeContext\":{\"durationMs\":2592000000},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"crossComponentResources\":[\"{Workspace}\"],\"visualization\":\"table\",\"gridSettings\":{\"formatters\":[{\"columnMatch\":\"Severity\",\"formatter\":18,\"formatOptions\":{\"thresholdsOptions\":\"colors\",\"thresholdsGrid\":[{\"operator\":\"==\",\"thresholdValue\":\"High\",\"representation\":\"redBright\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Medium\",\"representation\":\"orange\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Low\",\"representation\":\"green\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Informational\",\"representation\":\"gray\",\"text\":\"{0}{1}\"},{\"operator\":\"Default\",\"representation\":\"blue\",\"text\":\"{0}{1}\"}]}},{\"columnMatch\":\"Remediation\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"CellDetails\",\"linkIsContextBlade\":true},\"tooltipFormat\":{\"tooltip\":\"Click to see more details about the Remediation step\"}},{\"columnMatch\":\"url_\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\",\"linkLabel\":\"\",\"linkIsContextBlade\":false},\"tooltipFormat\":{\"tooltip\":\"Open this link (in another Tab)\"}},{\"columnMatch\":\"entityList\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"CellDetails\",\"linkIsContextBlade\":true}}],\"labelSettings\":[{\"columnId\":\"url_\",\"label\":\"URL\",\"comment\":\"Show a URL if available 
\"}]},\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"IncidentNumber\"},\"subtitleContent\":{\"columnMatch\":\"Title\"},\"leftContent\":{\"columnMatch\":\"Remediation\"},\"secondaryContent\":{\"columnMatch\":\"url_\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\",\"linkIsContextBlade\":false}},\"showBorder\":false},\"graphSettings\":{\"type\":2,\"topContent\":{\"columnMatch\":\"IncidentNumber\"},\"leftContent\":{\"columnMatch\":\"Title\"},\"centerContent\":{\"columnMatch\":\"url_\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\"}},\"hivesContent\":{\"columnMatch\":\"Title\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\"}},\"nodeIdField\":\"Remediation\",\"sourceIdField\":\"Title\",\"targetIdField\":\"Remediation\",\"graphOrientation\":3,\"showOrientationToggles\":false,\"staticNodeSize\":100,\"colorSettings\":{\"nodeColorField\":\"url_\",\"type\":1,\"colorPalette\":\"default\"},\"groupByField\":\"Title\",\"hivesMargin\":5}},\"conditionalVisibility\":{\"parameterName\":\"watchListExists\",\"comparison\":\"isEqualTo\"},\"name\":\"query - basic View \"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"_GetWatchlist('SocRA')\\r\\n| join\\r\\n (\\r\\n SecurityIncident | where IncidentNumber == '{IncidentNumber}' \\r\\n | summarize arg_max(TimeGenerated, CreatedTime, Status, Severity, Owner, AdditionalData, IncidentUrl, Comments, Classification, ClassificationReason, ClassificationComment, Labels, Title, AlertIds) by IncidentNumber\\r\\n ) on $left.Alert == $right.Title\\r\\n| project-keep A*, Status, Severity //, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19\\r\\n| project-reorder Alert, Status, Severity, A* asc\\r\\n| project-away AlertIds, AdditionalData\\r\\n| evaluate narrow()\\r\\n| extend url_ = iif(Value contains 'https://',extract (\\\"https://([a-zA-Z0-9-_://@.?%=&# +]*)\\\",0,Value),\\\"\\\")\\r\\n| extend r = iif(Column startswith 'A', 
extract(@\\\"\\\\d+\\\",0,tostring(Column)),\\\"\\\")\\r\\n| where isnotempty(Value)\\r\\n| project tostring(Column), RemediationStep =Value, URLtoOpen=url_,toint(r)\\r\\n| order by Column desc, r asc \\r\\n\\r\\n\\r\\n\\r\\n\\r\\n\\r\\n\\r\\n\",\"size\":0,\"title\":\"Incident and Remediations - Advanced View (from Watchlist, \\\"SocRA\\\") Incident Number:{IncidentNumber}\",\"timeContext\":{\"durationMs\":2592000000},\"timeContextFromParameter\":\"TimeRange\",\"showExportToExcel\":true,\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"visualization\":\"table\",\"gridSettings\":{\"formatters\":[{\"columnMatch\":\"Remediation\",\"formatter\":18,\"formatOptions\":{\"linkTarget\":\"CellDetails\",\"linkIsContextBlade\":true,\"thresholdsOptions\":\"colors\",\"thresholdsGrid\":[{\"operator\":\"==\",\"thresholdValue\":\"Low\",\"representation\":\"green\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Medium\",\"representation\":\"orange\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"High\",\"representation\":\"redBright\",\"text\":\"{0}{1}\"},{\"operator\":\"Default\",\"text\":\"{0}{1}\"}]},\"tooltipFormat\":{\"tooltip\":\"Click to see more details about the Remediation 
step\"}},{\"columnMatch\":\"URLtoOpen\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\"}},{\"columnMatch\":\"Severity\",\"formatter\":18,\"formatOptions\":{\"thresholdsOptions\":\"colors\",\"thresholdsGrid\":[{\"operator\":\"==\",\"thresholdValue\":\"High\",\"representation\":\"redBright\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Medium\",\"representation\":\"orange\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Low\",\"representation\":\"green\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Informational\",\"representation\":\"gray\",\"text\":\"{0}{1}\"},{\"operator\":\"Default\",\"representation\":\"blue\",\"text\":\"{0}{1}\"}]}},{\"columnMatch\":\"url_\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\",\"linkLabel\":\"\",\"linkIsContextBlade\":false},\"tooltipFormat\":{\"tooltip\":\"Open this link (in another Tab)\"}},{\"columnMatch\":\"entityList\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"CellDetails\",\"linkIsContextBlade\":true}}],\"filter\":true,\"sortBy\":[{\"itemKey\":\"r\",\"sortOrder\":1}],\"labelSettings\":[{\"columnId\":\"r\",\"label\":\"AlertOrder\"}]},\"sortBy\":[{\"itemKey\":\"r\",\"sortOrder\":1}],\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"IncidentNumber\"},\"subtitleContent\":{\"columnMatch\":\"Title\"},\"leftContent\":{\"columnMatch\":\"Remediation\"},\"secondaryContent\":{\"columnMatch\":\"url_\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\",\"linkIsContextBlade\":false}},\"showBorder\":false},\"graphSettings\":{\"type\":2,\"topContent\":{\"columnMatch\":\"IncidentNumber\"},\"leftContent\":{\"columnMatch\":\"Title\"},\"centerContent\":{\"columnMatch\":\"url_\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\"}},\"hivesContent\":{\"columnMatch\":\"Title\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\"}},\"nodeIdField\":\"Remediation\",\"sourceIdField\":\"Title\",\"targetIdField\":\"Remediation\",\"graphOrientation\":3,\"
showOrientationToggles\":false,\"staticNodeSize\":100,\"colorSettings\":{\"nodeColorField\":\"url_\",\"type\":1,\"colorPalette\":\"default\"},\"groupByField\":\"Title\",\"hivesMargin\":5}},\"conditionalVisibility\":{\"parameterName\":\"watchListExists\",\"comparison\":\"isNotEqualTo\"},\"name\":\"query - advanced View\"}]},\"name\":\"RecActions\"},{\"type\":1,\"content\":{\"json\":\"## Incident Entities\"},\"name\":\"text - 2 - Copy - Copy - Copy - Copy - Copy\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"{\\\"version\\\":\\\"ARMEndpoint/1.0\\\",\\\"method\\\":\\\"POST\\\",\\\"path\\\":\\\"/subscriptions/{Subscription:id}/resourceGroups/{resourceGroup}/providers/Microsoft.OperationalInsights/workspaces/{Workspace:name}/providers/Microsoft.SecurityInsights/incidents/{IncidentID}/entities\\\",\\\"urlParams\\\":[{\\\"key\\\":\\\"api-version\\\",\\\"value\\\":\\\"2021-04-01\\\"}],\\\"batchDisabled\\\":false,\\\"transformers\\\":[{\\\"type\\\":\\\"jsonpath\\\",\\\"settings\\\":{\\\"tablePath\\\":\\\"$.metaData\\\"}}]}\\r\\n\",\"size\":2,\"noDataMessage\":\"No entities were 
found\",\"noDataMessageStyle\":4,\"queryType\":12,\"visualization\":\"piechart\",\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"entityKind\",\"formatter\":12,\"formatOptions\":{\"palette\":\"blue\"}},\"leftContent\":{\"columnMatch\":\"count\",\"formatter\":1,\"numberFormat\":{\"unit\":0,\"options\":{\"style\":\"decimal\"}}},\"showBorder\":false,\"sortCriteriaField\":\"Order\",\"sortOrderField\":1,\"size\":\"auto\"}},\"customWidth\":\"30\",\"name\":\"Entities\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"{\\\"version\\\":\\\"ARMEndpoint/1.0\\\",\\\"method\\\":\\\"POST\\\",\\\"path\\\":\\\"/subscriptions/{Subscription:id}/resourceGroups/{resourceGroup}/providers/Microsoft.OperationalInsights/workspaces/{Workspace:name}/providers/Microsoft.SecurityInsights/incidents/{IncidentID}/entities\\\",\\\"urlParams\\\":[{\\\"key\\\":\\\"api-version\\\",\\\"value\\\":\\\"2021-04-01\\\"}],\\\"batchDisabled\\\":false,\\\"transformers\\\":[{\\\"type\\\":\\\"jsonpath\\\",\\\"settings\\\":{\\\"tablePath\\\":\\\"$.entities\\\",\\\"columns\\\":[{\\\"path\\\":\\\"$.kind\\\",\\\"columnid\\\":\\\"Kind\\\"},{\\\"path\\\":\\\"$.properties.friendlyName\\\",\\\"columnid\\\":\\\"Name\\\"}]}}]}\\r\\n\",\"size\":2,\"noDataMessage\":\"No entities were found\",\"noDataMessageStyle\":4,\"queryType\":12,\"visualization\":\"table\",\"gridSettings\":{\"hierarchySettings\":{\"treeType\":1,\"groupBy\":[\"Kind\"],\"expandTopLevel\":true}},\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"kind\",\"formatter\":1,\"numberFormat\":{\"unit\":0,\"options\":{\"style\":\"decimal\"}}},\"subtitleContent\":{\"columnMatch\":\"properties\",\"formatter\":1},\"showBorder\":false,\"sortCriteriaField\":\"kind\",\"sortOrderField\":1,\"size\":\"auto\"}},\"customWidth\":\"70\",\"name\":\"Entities List\"},{\"type\":12,\"content\":{\"version\":\"NotebookGroup/1.0\",\"groupType\":\"editable\",\"items\":[{\"type\":1,\"content\":{\"json\":\"## Recent activities\"},\"name\":\"text - 2 - Copy - 
Copy - Copy - Copy\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"SecurityIncident\\n| where Severity in ({Severity}) or '{Severity:label}' == \\\"All\\\"\\n| extend Tactics = todynamic(AdditionalData.tactics)\\n| where Tactics in ({Tactics}) or '{Tactics:label}' == \\\"All\\\"\\n| extend Owner = todynamic(Owner.assignedTo) \\n| where Owner in ({Owner}) or '{Owner:label}' == \\\"All\\\"\\n| extend Product = todynamic((parse_json(tostring(AdditionalData.alertProductNames))[0])) \\n| where Product in ({Product}) or '{Product:label}' == \\\"All\\\"\\n| where IncidentNumber == '{IncidentNumber}' or '{IncidentNumber}' == ''\\n| order by LastModifiedTime \\n| project LastModifiedTime,IncidentNumber, Title, Product, IncidentUrl, ModifiedBy,Status, Severity, Owner\\n| take 250\\n\\n\\n\",\"size\":1,\"timeContext\":{\"durationMs\":2592000000},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"crossComponentResources\":[\"{Workspace}\"],\"visualization\":\"table\",\"gridSettings\":{\"formatters\":[{\"columnMatch\":\"IncidentUrl\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\",\"linkLabel\":\"Go to incident >\"}}],\"labelSettings\":[{\"columnId\":\"LastModifiedTime\",\"label\":\"Last Modified Time\"},{\"columnId\":\"IncidentNumber\",\"label\":\"Incident Number\"},{\"columnId\":\"IncidentUrl\",\"label\":\"Link to incident\"},{\"columnId\":\"ModifiedBy\",\"label\":\"Modified By\"}]},\"tileSettings\":{\"showBorder\":false,\"titleContent\":{\"columnMatch\":\"Column1\",\"formatter\":1},\"leftContent\":{\"columnMatch\":\"IncidentNumber\",\"formatter\":12,\"formatOptions\":{\"palette\":\"auto\"},\"numberFormat\":{\"unit\":17,\"options\":{\"maximumSignificantDigits\":3,\"maximumFractionDigits\":2}}}}},\"name\":\"query - 2 - Copy - Copy - Copy - Copy\"}]},\"name\":\"Incidents tactic over 
time\"},{\"type\":12,\"content\":{\"version\":\"NotebookGroup/1.0\",\"groupType\":\"editable\",\"items\":[{\"type\":1,\"content\":{\"json\":\"## Incident's Comments\"},\"name\":\"text - 2 - Copy - Copy - Copy - Copy\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"SecurityIncident\\n| where IncidentNumber == '{IncidentNumber}' or '{IncidentNumber}' == ''\\n| summarize arg_max(TimeGenerated,Status, Severity, Owner, AdditionalData, IncidentUrl, Comments) by IncidentNumber\\n| where Severity in ({Severity}) or '{Severity:label}' == \\\"All\\\"\\n| extend Tactics = todynamic(AdditionalData.tactics)\\n| where Tactics in ({Tactics}) or '{Tactics:label}' == \\\"All\\\"\\n| extend Owner = todynamic(Owner.assignedTo) \\n| where Owner in ({Owner}) or '{Owner:label}' == \\\"All\\\"\\n| extend Product = todynamic((parse_json(tostring(AdditionalData.alertProductNames))[0])) \\n| where Product in ({Product}) or '{Product:label}' == \\\"All\\\"\\n| mv-expand Comments to typeof(string)\\n| extend Message = extract('message\\\":\\\"(.*?)\\\"',1,tostring(Comments)), Author = extract('name\\\":\\\"(.*?)\\\"',1,tostring(Comments)), CreatedTimeUTC = extract('createdTimeUtc\\\":\\\"(.*?)\\\"',1,tostring(Comments))\\n| project CreatedTimeUTC, Author, Message, IncidentNumber, Owner\\n| take 250\\n\\n\\n\",\"size\":1,\"timeContext\":{\"durationMs\":2592000000},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"crossComponentResources\":[\"{Workspace}\"],\"visualization\":\"table\",\"gridSettings\":{\"formatters\":[{\"columnMatch\":\"IncidentUrl\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\",\"linkLabel\":\"Go to incident 
>\"}}],\"sortBy\":[{\"itemKey\":\"IncidentNumber\",\"sortOrder\":2}]},\"sortBy\":[{\"itemKey\":\"IncidentNumber\",\"sortOrder\":2}],\"tileSettings\":{\"showBorder\":false,\"titleContent\":{\"columnMatch\":\"Column1\",\"formatter\":1},\"leftContent\":{\"columnMatch\":\"IncidentNumber\",\"formatter\":12,\"formatOptions\":{\"palette\":\"auto\"},\"numberFormat\":{\"unit\":17,\"options\":{\"maximumSignificantDigits\":3,\"maximumFractionDigits\":2}}}}},\"name\":\"query - 2 - Copy - Copy - Copy - Copy\"}]},\"name\":\"Incidents tactic over time - Copy\"},{\"type\":12,\"content\":{\"version\":\"NotebookGroup/1.0\",\"groupType\":\"editable\",\"items\":[{\"type\":1,\"content\":{\"json\":\"## Time to closure\\r\\n\"},\"name\":\"text - 2 - Copy\"},{\"type\":1,\"content\":{\"json\":\"The mean time between the incident creation and first modification by owner\\r\\n\\r\\n\",\"style\":\"info\"},\"conditionalVisibility\":{\"parameterName\":\"Help\",\"comparison\":\"isEqualTo\",\"value\":\"Yes\"},\"name\":\"text - 2 - Copy - Copy\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"SecurityIncident\\n| where IncidentNumber == '{IncidentNumber}' or '{IncidentNumber}' == ''\\n| where CreatedTime >= {TimeRange:start} and CreatedTime <= {TimeRange:end}\\n| where Severity in ({Severity}) or '{Severity:label}' == \\\"All\\\"\\n| extend Tactics = todynamic(AdditionalData.tactics)\\n| where Tactics in ({Tactics}) or '{Tactics:label}' == \\\"All\\\"\\n| extend Owner = todynamic(Owner.assignedTo) \\n| where Owner in ({Owner}) or '{Owner:label}' == \\\"All\\\"\\n| extend Product = todynamic((parse_json(tostring(AdditionalData.alertProductNames))[0])) \\n| where Product in ({Product}) or '{Product:label}' == \\\"All\\\"\\n| summarize arg_max(TimeGenerated,Title, ClosedTime, CreatedTime) by IncidentNumber \\n| where isnotnull(ClosedTime)\\n| extend TimeToClosure = (ClosedTime - 
CreatedTime)/1h\\n\",\"size\":1,\"timeContext\":{\"durationMs\":2592000000},\"timeContextFromParameter\":\"TimeRange\",\"exportFieldName\":\"series\",\"exportParameterName\":\"Status\",\"exportDefaultValue\":\"All\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"crossComponentResources\":[\"{Workspace}\"],\"visualization\":\"tiles\",\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"IncidentNumber\",\"formatter\":1},\"leftContent\":{\"columnMatch\":\"TimeToClosure\",\"formatter\":12,\"formatOptions\":{\"palette\":\"auto\"},\"numberFormat\":{\"unit\":26,\"options\":{\"style\":\"decimal\",\"useGrouping\":false,\"maximumFractionDigits\":3}}},\"showBorder\":false}},\"name\":\"query - 2 - Copy\"}]},\"name\":\"Time to mitigate\",\"styleSettings\":{\"margin\":\"0\",\"padding\":\"0\"}},{\"type\":12,\"content\":{\"version\":\"NotebookGroup/1.0\",\"groupType\":\"editable\",\"items\":[{\"type\":1,\"content\":{\"json\":\"## Time to triage \\r\\n\"},\"name\":\"text - 2 - Copy\"},{\"type\":1,\"content\":{\"json\":\"The mean time between the incident creation and first modification by owner\\r\\n\\r\\n\",\"style\":\"info\"},\"conditionalVisibility\":{\"parameterName\":\"Help\",\"comparison\":\"isEqualTo\",\"value\":\"Yes\"},\"name\":\"text - 2 - Copy - Copy\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"SecurityIncident\\n| where IncidentNumber == '{IncidentNumber}' or '{IncidentNumber}' == ''\\n| where CreatedTime >= {TimeRange:start} and CreatedTime <= {TimeRange:end}\\n| where Severity in ({Severity}) or '{Severity:label}' == \\\"All\\\"\\n| extend Tactics = todynamic(AdditionalData.tactics)\\n| where Tactics in ({Tactics}) or '{Tactics:label}' == \\\"All\\\"\\n| extend Owner = todynamic(Owner.assignedTo) \\n| where Owner in ({Owner}) or '{Owner:label}' == \\\"All\\\"\\n| extend Product = todynamic((parse_json(tostring(AdditionalData.alertProductNames))[0])) \\n| where Product in ({Product}) or '{Product:label}' == 
\\\"All\\\"\\n| where ModifiedBy != 'Incident created from alert'\\n| summarize arg_max(LastModifiedTime,*) by IncidentNumber \\n| where isnotnull(FirstModifiedTime)\\n| extend TimeToTriage = FirstModifiedTime - CreatedTime\\n| project IncidentNumber, MeanToTriage = TimeToTriage/1h\\n\",\"size\":1,\"timeContext\":{\"durationMs\":94608000000,\"endTime\":\"2023-06-01T17:13:00Z\"},\"exportFieldName\":\"series\",\"exportParameterName\":\"Status\",\"exportDefaultValue\":\"All\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"crossComponentResources\":[\"{Workspace}\"],\"visualization\":\"tiles\",\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"IncidentNumber\",\"formatter\":1},\"leftContent\":{\"columnMatch\":\"MeanToTriage\",\"formatter\":12,\"formatOptions\":{\"palette\":\"auto\"},\"numberFormat\":{\"unit\":26,\"options\":{\"style\":\"decimal\",\"useGrouping\":false,\"maximumFractionDigits\":3}}},\"showBorder\":false}},\"name\":\"query - 2 - Copy\"}]},\"name\":\"Time to close\",\"styleSettings\":{\"margin\":\"0\",\"padding\":\"0\"}}],\"fromTemplateId\":\"sentinel-IncidentOverview\",\"$schema\":\"https://github.com/Microsoft/Application-Insights-Workbooks/blob/master/schema/workbook.json\"}\r\n", + "serializedData": "{\"version\":\"Notebook/1.0\",\"items\":[{\"type\":1,\"content\":{\"json\":\"# Incident Overview\"},\"customWidth\":\"35\",\"name\":\"Headline\"},{\"type\":9,\"content\":{\"version\":\"KqlParameterItem/1.0\",\"parameters\":[{\"id\":\"9a199167-2dde-49dd-8f01-23e9d1fa8151\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"InternalWSs\",\"type\":1,\"isRequired\":true,\"query\":\"SecurityIncident\\r\\n| take 1\\r\\n| parse IncidentUrl with * \\\"/workspaces/\\\" Workspace \\\"/\\\" *\\r\\n| project 
Workspace\",\"isHiddenWhenLocked\":true,\"timeContext\":{\"durationMs\":2592000000},\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"7806fefd-432f-4828-9756-8c0be5c08d07\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"InternalSub\",\"type\":1,\"isRequired\":true,\"query\":\"SecurityIncident\\r\\n| take 1\\r\\n| parse IncidentUrl with * \\\"/subscriptions/\\\" subscriptions \\\"/\\\" *\\r\\n| project subscriptions\",\"isHiddenWhenLocked\":true,\"timeContext\":{\"durationMs\":2592000000},\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"55d3ab63-6e1f-4d02-8d9e-2225526689c7\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"Subscription\",\"type\":6,\"isRequired\":true,\"query\":\"summarize by subscriptionId\\r\\n| project subscriptionId, Subscription=strcat(\\\"/subscriptions/\\\", subscriptionId)\\r\\n| extend selected = iff(subscriptionId =~ '{InternalSub}', true, false)\\r\\n\",\"crossComponentResources\":[\"value::all\"],\"typeSettings\":{\"showDefault\":false},\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":1,\"resourceType\":\"microsoft.resourcegraph/resources\",\"value\":\"\"},{\"id\":\"95a45501-31b5-4ea2-bcb3-eb208e0080e2\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"Workspace\",\"type\":5,\"isRequired\":true,\"multiSelect\":true,\"quote\":\"'\",\"delimiter\":\",\",\"query\":\"//resources | where type =~ 'Microsoft.operationsmanagement/solutions' | where name contains //'SecurityInsights' | project id //= tostring(properties.workspaceResourceId)\\r\\n\\r\\nwhere type =~ 'microsoft.operationalinsights/workspaces'\\r\\n| project value =id, label = name, selected = iff(name =~ '{InternalWSs}', true, 
false)\\r\\n\\r\\n\\r\\n\",\"crossComponentResources\":[\"value::all\"],\"typeSettings\":{\"showDefault\":false},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":1,\"resourceType\":\"microsoft.resourcegraph/resources\"},{\"id\":\"7d597ad7-4a2a-45ed-a4fe-7ee32de0fc22\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"TimeRange\",\"label\":\"Incident Creation Time\",\"type\":4,\"isRequired\":true,\"value\":{\"durationMs\":2592000000},\"typeSettings\":{\"selectableValues\":[{\"durationMs\":14400000},{\"durationMs\":43200000},{\"durationMs\":86400000},{\"durationMs\":172800000},{\"durationMs\":259200000},{\"durationMs\":604800000},{\"durationMs\":1209600000},{\"durationMs\":2592000000}],\"allowCustom\":true}},{\"id\":\"3a87d4f7-42cc-4c62-b543-6b5d9ab8cf27\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"Severity\",\"type\":2,\"isRequired\":true,\"multiSelect\":true,\"quote\":\"'\",\"delimiter\":\",\",\"query\":\"SecurityIncident\\r\\n| summarize Count = count(IncidentNumber) by Severity\\r\\n| project Value = Severity, Label = strcat(Severity, \\\": \\\", Count)\",\"value\":[\"value::all\"],\"typeSettings\":{\"additionalResourceOptions\":[\"value::all\"],\"selectAllValue\":\"*\"},\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"81085d3a-5aca-488e-b7c6-ecf1167e59f7\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"Tactics\",\"type\":2,\"isRequired\":true,\"multiSelect\":true,\"quote\":\"'\",\"delimiter\":\",\",\"query\":\"SecurityIncident\\r\\n| extend Tactics = todynamic(AdditionalData.tactics)\\r\\n| mv-expand Tactics to typeof(string)\\r\\n| summarize Count=count(IncidentNumber) by Tactics\\r\\n| project Value = Tactics, Label = strcat(Tactics, \\\": \\\", 
Count)\",\"value\":[\"value::all\"],\"typeSettings\":{\"additionalResourceOptions\":[\"value::all\"],\"selectAllValue\":\"*\"},\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"0f9efb0d-ac34-41d0-8a19-165840eb2a71\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"Owner\",\"type\":2,\"isRequired\":true,\"multiSelect\":true,\"quote\":\"'\",\"delimiter\":\",\",\"query\":\"SecurityIncident\\r\\n| extend owner = tostring(Owner.assignedTo) \\r\\n| summarize Count=count(IncidentNumber) by Owner= case(owner==\\\"\\\", \\\"Unassigned\\\",owner)\\r\\n| project Value = Owner, Label = strcat(Owner, \\\": \\\", Count)\",\"value\":[\"value::all\"],\"typeSettings\":{\"additionalResourceOptions\":[\"value::all\"],\"selectAllValue\":\"*\"},\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"cf86113b-59ad-4fc9-aeb7-9b44e230641e\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"Product\",\"label\":\"Product Name\",\"type\":2,\"isRequired\":true,\"multiSelect\":true,\"quote\":\"'\",\"delimiter\":\",\",\"query\":\"SecurityIncident\\r\\n| extend Product = tostring(parse_json(tostring(AdditionalData.alertProductNames))[0]) \\r\\n| summarize Count=count(IncidentNumber) by Product\\r\\n| project Value = Product, Label = strcat(Product, \\\": \\\", Count)\\r\\n\",\"value\":[\"value::all\"],\"typeSettings\":{\"additionalResourceOptions\":[\"value::all\"],\"selectAllValue\":\"*\"},\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"1fea48e7-99b2-4664-8eb6-bd35fc4efaf0\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"resourceGroup\",\"type\":1,\"query\":\"resources\\r\\n| where type =~ 'microsoft.operationalinsights/workspaces'\\r\\n| where id == 
\\\"{Workspace:lable}\\\"\\r\\n| project resourceGroup\",\"crossComponentResources\":[\"{Subscription}\"],\"isHiddenWhenLocked\":true,\"queryType\":1,\"resourceType\":\"microsoft.resourcegraph/resources\"},{\"id\":\"2908f26a-6238-43ed-9aa0-546c9041d918\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"Help\",\"label\":\"Show Help\",\"type\":10,\"isRequired\":true,\"jsonData\":\"[{ \\\"value\\\": \\\"Yes\\\", \\\"label\\\": \\\"Yes\\\"},\\r\\n {\\\"value\\\": \\\"No\\\", \\\"label\\\": \\\"No\\\", \\\"selected\\\":true }]\",\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\"}],\"style\":\"above\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},\"customWidth\":\"100\",\"name\":\"parameters - 6\"},{\"type\":1,\"content\":{\"json\":\"The Incident Overview workbook is designed to assist in triaging and investigation by providing in-depth information about the incident, including:\\r\\n* General information\\r\\n* Entity data\\r\\n* Triage time (time between incident creation and first response)\\r\\n* Mitigation time (time between incident creation and closing)\\r\\n* Comments\\r\\n* Remediation information from the Alerts or from a Watchlist - setup readme: https://github.com/Azure/Azure-Sentinel/wiki/SOC-Process-Framework\\r\\n\\r\\nCustomize this workbook by saving and editing it. \\r\\nYou can reach this workbook template from the incidents panel as well. 
Once you have customized it, the link from the incident panel will open the customized workbook instead of the template.\",\"style\":\"info\"},\"conditionalVisibility\":{\"parameterName\":\"Help\",\"comparison\":\"isEqualTo\",\"value\":\"Yes\"},\"customWidth\":\"100\",\"name\":\"Info\"},{\"type\":9,\"content\":{\"version\":\"KqlParameterItem/1.0\",\"crossComponentResources\":[\"{Workspace}\"],\"parameters\":[{\"id\":\"9aec751b-07bd-43ba-80b9-f711887dce45\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"IncidentNumber\",\"label\":\"Incident Number\",\"type\":1,\"isRequired\":true,\"value\":\"\",\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\"},{\"id\":\"9ef1a34d-5c8e-42ad-b1d7-1353e0091060\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"testRemediation\",\"type\":1,\"isRequired\":true,\"query\":\"SecurityIncident\\r\\n| where IncidentNumber == '{IncidentNumber:value}' \\r\\n| summarize arg_max(LastModifiedTime,*) by tostring(IncidentNumber)\\r\\n| extend Alerts = extract(\\\"\\\\\\\\[(.*?)\\\\\\\\]\\\", 1, tostring(AlertIds))\\r\\n| mv-expand AlertIds to typeof(string)\\r\\n| join \\r\\n(\\r\\n SecurityAlert\\r\\n | extend Remediation_ = parse_json(RemediationSteps)\\r\\n | mv-expand Remediation_\\r\\n) on $left.AlertIds == $right.SystemAlertId\\r\\n| summarize Remediation=make_set(tostring(Remediation_)) by IncidentNumber, Title, Severity\\r\\n| mv-expand Remediation to typeof(string)\\r\\n| project value=iif(isempty(Remediation),'0','1')\",\"crossComponentResources\":[\"{Workspace}\"],\"isHiddenWhenLocked\":true,\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"e5d4131c-43a9-4f92-87c9-dbf647530c9c\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"watchListExists\",\"type\":1,\"isRequired\":true,\"query\":\"_GetWatchlist('SocRA')\\r\\n| limit 
1\",\"crossComponentResources\":[\"{Workspace}\"],\"isHiddenWhenLocked\":true,\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"}],\"style\":\"pills\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},\"customWidth\":\"50\",\"name\":\"parameters - 6 - Copy\"},{\"type\":9,\"content\":{\"version\":\"KqlParameterItem/1.0\",\"crossComponentResources\":[\"{Workspace}\"],\"parameters\":[{\"id\":\"f978edb2-9886-4bff-8e12-8280800321c3\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"IncidentID\",\"label\":\"Incident Name\",\"type\":1,\"query\":\"SecurityIncident\\r\\n| where IncidentNumber == {IncidentNumber}\\r\\n| take 1\\r\\n| project IncidentName\\r\\n\",\"crossComponentResources\":[\"{Workspace}\"],\"isHiddenWhenLocked\":true,\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},{\"id\":\"3b8e6cdd-4578-49cb-a515-1f9dec104fd7\",\"version\":\"KqlParameterItem/1.0\",\"name\":\"RuleId\",\"label\":\"Rule Id\",\"type\":1,\"query\":\"SecurityIncident\\r\\n| where IncidentNumber == {IncidentNumber}\\r\\n| summarize arg_max(TimeGenerated, RelatedAnalyticRuleIds) by IncidentNumber\\r\\n| project RelatedAnalyticRuleIds\",\"crossComponentResources\":[\"{Workspace}\"],\"timeContext\":{\"durationMs\":0},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"}],\"style\":\"pills\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\"},\"conditionalVisibility\":{\"parameterName\":\"IncidentNumber\",\"comparison\":\"isEqualTo\",\"value\":\"e\"},\"customWidth\":\"50\",\"name\":\"Invisible parameters\"},{\"type\":1,\"content\":{\"json\":\"## General Incident Information \"},\"customWidth\":\"67\",\"name\":\"Headline - general info\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"let incidentNumberToCheck = 
'{IncidentNumber}';\\r\\nlet incidentWithNoAlertsQuery = SecurityIncident\\r\\n| where IncidentNumber == incidentNumberToCheck\\r\\n| summarize arg_max(TimeGenerated,CreatedTime,Status, Severity, Owner, AdditionalData, IncidentUrl, Comments, Classification,ClassificationReason, ClassificationComment,Labels, Title, AlertIds) by IncidentNumber\\r\\n| where array_length(AlertIds) == 0\\r\\n| where Severity in ({Severity}) or '{Severity:label}' == \\\"All\\\"\\r\\n| extend Tactics = todynamic(AdditionalData.tactics)\\r\\n| extend Owner = todynamic(Owner.assignedTo), IncidentCreated = format_datetime(CreatedTime,'yy-MM-dd HH:mm')\\r\\n| where Owner in ({Owner}) or '{Owner:label}' == \\\"All\\\"\\r\\n| extend Product = todynamic((parse_json(tostring(AdditionalData.alertProductNames))[0]))\\r\\n| where Product in ({Product}) or '{Product:label}' == \\\"All\\\"\\r\\n| extend Tags = extract_all('labelName\\\":\\\"(.*?)\\\"',tostring(Labels))\\r\\n| extend Owner = case(tostring(Owner)==\\\"\\\", \\\"Unassigned\\\",tostring(Owner)), Products = strcat_array(AdditionalData.alertProductNames, \\\", \\\"), Alerts = tostring(AdditionalData.alertsCount), Bookmarks = tostring(AdditionalData.bookmarksCount), Comments = tostring(AdditionalData.commentsCount), Tactics = strcat_array(AdditionalData.tactics, \\\", \\\"), Labels = strcat_array(Tags, \\\", \\\")\\r\\n;\\r\\nlet incidentWithAlertsQuery = SecurityIncident\\r\\n| where IncidentNumber == incidentNumberToCheck\\r\\n| summarize arg_max(TimeGenerated,CreatedTime,Status, Severity, Owner, AdditionalData, IncidentUrl, Comments, Classification,ClassificationReason, ClassificationComment,Labels, Title, AlertIds) by IncidentNumber\\r\\n| where array_length(AlertIds) > 0\\r\\n| where Severity in ({Severity}) or '{Severity:label}' == \\\"All\\\"\\r\\n| extend Tactics = todynamic(AdditionalData.tactics)\\r\\n| extend Owner = todynamic(Owner.assignedTo), IncidentCreated = format_datetime(CreatedTime,'yy-MM-dd HH:mm')\\r\\n| where Owner in 
({Owner}) or '{Owner:label}' == \\\"All\\\"\\r\\n| extend Product = todynamic((parse_json(tostring(AdditionalData.alertProductNames))[0]))\\r\\n| where Product in ({Product}) or '{Product:label}' == \\\"All\\\"\\r\\n| extend Tags = extract_all('labelName\\\":\\\"(.*?)\\\"',tostring(Labels))\\r\\n| extend Owner = case(tostring(Owner)==\\\"\\\", \\\"Unassigned\\\",tostring(Owner)), Products = strcat_array(AdditionalData.alertProductNames, \\\", \\\"), Alerts = tostring(AdditionalData.alertsCount), Bookmarks = tostring(AdditionalData.bookmarksCount), Comments = tostring(AdditionalData.commentsCount), Tactics = strcat_array(AdditionalData.tactics, \\\", \\\"), Labels = strcat_array(Tags, \\\", \\\")\\r\\n| mv-expand AlertIds to typeof(string)\\r\\n| join kind=leftouter\\r\\n(SecurityAlert\\r\\n| summarize arg_max(TimeGenerated,AlertName, Description, AlertType, Entities) by SystemAlertId) on $left.AlertIds == $right.SystemAlertId\\r\\n| summarize AlertName = makelist(AlertName), AlertType = makelist(AlertType) by Comments, Labels, Title, Products, AlertsCount = Alerts, Bookmarks, Status, Severity, Owner, IncidentCreated, ClassificationComment, Classification, ClassificationReason\\r\\n| extend AlertNames = strcat_array(AlertName, \\\", \\\"), AlertTypes = strcat_array(AlertType, \\\", \\\")\\r\\n;\\r\\nincidentWithNoAlertsQuery\\r\\n| union incidentWithAlertsQuery\\r\\n| project packed = pack_all()\\r\\n| mv-expand packed\\r\\n| parse tostring(packed) with * '\\\"' Field '\\\":\\\"' Value '\\\"}'\\r\\n| where Field in ('Severity', 'Owner','Status', 'AlertsCount','Products','Title', 'IncidentCreated', 'Labels','Bookmarks', 'AlertNames', 'AlertsType', 'Classification','ClassificationComment','ClassificationReason')\\r\\n| extend Field1 = case(Field== \\\"IncidentCreated\\\", \\\"Time created\\\", Field == \\\"AlertsCount\\\", \\\"Alert count\\\", Field == \\\"ClassificationComment\\\", \\\"Classification Comment\\\", Field == \\\"ClassificationReason\\\", 
\\\"Classification Reason\\\", Field == \\\"AlertNames\\\", \\\"Alert Names\\\", Field)\\r\\n| extend Order = case(Field==\\\"Title\\\", 1,Field==\\\"IncidentCreated\\\", 2,Field==\\\"Severity\\\", 3,Field==\\\"Status\\\", 4,Field==\\\"Owner\\\", 5,Field==\\\"Products\\\", 6,Field==\\\"AlertsType\\\",6,Field==\\\"AlertsCount\\\", 7,Field==\\\"Bookmarks\\\", 8, Field==\\\"Labels\\\", 9,Field==\\\"Classification\\\", 10,Field==\\\"ClassificationReason\\\",11, 100)\",\"size\":0,\"noDataMessage\":\"Enter an incident number\",\"noDataMessageStyle\":5,\"timeContext\":{\"durationMs\":2592000000},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"crossComponentResources\":[\"{Workspace}\"],\"visualization\":\"tiles\",\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"Field1\",\"formatter\":1,\"numberFormat\":{\"unit\":0,\"options\":{\"style\":\"decimal\",\"useGrouping\":false}}},\"leftContent\":{\"columnMatch\":\"Value\",\"formatter\":18,\"formatOptions\":{\"thresholdsOptions\":\"colors\",\"thresholdsGrid\":[{\"operator\":\"==\",\"thresholdValue\":\"Low\",\"representation\":\"green\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Medium\",\"representation\":\"orange\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"High\",\"representation\":\"redBright\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"New\",\"representation\":\"blue\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Active\",\"representation\":\"lightBlue\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Closed\",\"representation\":\"gray\",\"text\":\"{0}{1}\"},{\"operator\":\"Default\",\"text\":\"{0}{1}\"}]}},\"secondaryContent\":{\"columnMatch\":\"Remediation_\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\"}},\"showBorder\":false,\"sortCriteriaField\":\"Order\",\"sortOrderField\":1,\"size\":\"auto\"}},\"customWidth\":\"67\",\"name\":\"general 
info\"},{\"type\":12,\"content\":{\"version\":\"NotebookGroup/1.0\",\"groupType\":\"editable\",\"items\":[{\"type\":1,\"content\":{\"json\":\"## Closing Classifications of Similar Incidents\"},\"name\":\"Headline - classification\"},{\"type\":1,\"content\":{\"json\":\"Closing classifications of incidents that where created from the same rule in the past month\",\"style\":\"info\"},\"conditionalVisibility\":{\"parameterName\":\"Help\",\"comparison\":\"isEqualTo\",\"value\":\"Yes\"},\"name\":\"Info - Copy\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"let alertText = strcat_array(dynamic([{RuleId}]),\\\",\\\");\\r\\nlet getAmountOfIncidentForRuleId = (classification:string){\\r\\n SecurityIncident\\r\\n | where TimeGenerated >= ago(30d)\\r\\n | where Classification == classification\\r\\n | mv-expand AlertId=AlertIds\\r\\n | extend AlertId=tostring(AlertId)\\r\\n | join (SecurityAlert| where TimeGenerated >=ago(30d)) on $left.AlertId==$right.SystemAlertId\\r\\n | mv-expand RuleId=RelatedAnalyticRuleIds\\r\\n | extend RuleId=iff(ProductName!= 'Azure Sentinel', ProductName,RuleId)\\r\\n | summarize counter=count() by RuleIdentifier=tostring(RuleId)\\r\\n | extend RuleId=RuleIdentifier\\r\\n | project-away RuleIdentifier\\r\\n};\\r\\nlet falsePositiveClassificationTable = getAmountOfIncidentForRuleId(\\\"FalsePositive\\\") | extend FalsePositiveCounter=counter | project-away counter;\\r\\nlet undeterminedClassificationTable = getAmountOfIncidentForRuleId(\\\"Undetermined\\\") | extend UndeterminedCounter=counter | project-away counter;\\r\\nlet benignPositiveClassificationTable = getAmountOfIncidentForRuleId(\\\"BenignPositive\\\") | extend BenignPositiveCounter=counter | project-away counter;\\r\\nlet truePositiveClassificationTable = getAmountOfIncidentForRuleId(\\\"TruePositive\\\") | extend TruePositiveCounter=counter | project-away counter;\\r\\nlet closedIncidentTable = SecurityIncident| where TimeGenerated >= ago(30d) |where Status == 
\\\"Closed\\\" | mv-expand AlertId=AlertIds| extend AlertId=tostring(AlertId)| join SecurityAlert on $left.AlertId==$right.SystemAlertId| mv-expand RelatedAnalyticRuleIds| extend RuleId= iff(ProductName == 'Azure Sentinel', tostring(RelatedAnalyticRuleIds), ProductName);\\r\\nlet joinByRuleId = (T:(RuleId:string), S:(RuleId:string)){\\r\\n T \\r\\n | join kind=fullouter S on $left.RuleId == $right.RuleId\\r\\n | extend RuleId= iff(RuleId == '', RuleId1,RuleId)\\r\\n | project-away RuleId1\\r\\n};\\r\\njoinByRuleId(joinByRuleId(joinByRuleId(joinByRuleId(falsePositiveClassificationTable, undeterminedClassificationTable) , benignPositiveClassificationTable), truePositiveClassificationTable),closedIncidentTable)\\r\\n| join kind=leftouter (SecurityAlert\\r\\n| where TimeGenerated >= ago(30d)\\r\\n| where ProductName == 'Azure Sentinel'\\r\\n| extend RuleId = parsejson( tostring(todynamic(ExtendedProperties)['Analytic Rule Ids']))\\r\\n| mv-expand RuleId=RuleId\\r\\n| extend RuleId=tostring(RuleId)\\r\\n| extend RuleName= tostring(todynamic(ExtendedProperties)['Analytic Rule Name'])\\r\\n| project RuleId,RuleName\\r\\n| distinct RuleId,RuleName)\\r\\n on $left.RuleId==$right.RuleId\\r\\n| extend RuleName=iff(isempty(RuleName),RuleId,RuleName)\\r\\n| project-away RuleId1\\r\\n| where alertText has RuleId \\r\\n| summarize dcount(IncidentNumber) by Classification\",\"size\":0,\"noDataMessage\":\"No recent closed incident were found\",\"noDataMessageStyle\":4,\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"crossComponentResources\":[\"{Workspace}\"],\"visualization\":\"piechart\",\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"Field1\",\"formatter\":1,\"numberFormat\":{\"unit\":0,\"options\":{\"style\":\"decimal\"}}},\"leftContent\":{\"columnMatch\":\"Value\",\"formatter\":1},\"showBorder\":false,\"sortCriteriaField\":\"Order\",\"sortOrderField\":1,\"size\":\"auto\"}},\"name\":\"Closing 
classification\"}]},\"customWidth\":\"33\",\"name\":\"Similar\"},{\"type\":12,\"content\":{\"version\":\"NotebookGroup/1.0\",\"groupType\":\"editable\",\"title\":\"⚠️ Remediations - click to Open\",\"expandable\":true,\"expanded\":true,\"items\":[{\"type\":1,\"content\":{\"json\":\"## Recommended Actions\"},\"name\":\"text - 15\"},{\"type\":1,\"content\":{\"json\":\"### Remediations and Actions Help\\r\\nIn this section of the Workbook, which only is visiable if an Alert has remediation entries, the default Remediations that are contained in the Alert data will be shown (Basic view). \\r\\nNote, not all Alerts have this data. \\r\\nHowever you can provide you own set of Alerts mapped to the Alert \\\"Title\\\". This enhanced feature, uses a Watchlist which has an alias name of: SocRA when you import it (Advanced view).\\r\\n\\r\\n \\r\\n### WatchList Instructions\\r\\n\\r\\n* You must download the Watchlist file called:\\r\\n### SOCAnalystActionsByAlert.csv \\r\\n\\r\\nIcon-security-248(https://github.com/Azure/Azure-Sentinel/blob/master/docs/SOCAnalystActionsByAlert.csv)\\r\\n \\r\\n * Name the Watchlist alias as: \\r\\n ### SocRA \\r\\n * Note: SocRA is case sensitive, you need an uppercase S, R and A.\",\"style\":\"info\"},\"conditionalVisibility\":{\"parameterName\":\"Help\",\"comparison\":\"isEqualTo\",\"value\":\"Yes\"},\"name\":\"text - ra Help text\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"SecurityIncident\\r\\n| where IncidentNumber == '{IncidentNumber:value}' \\r\\n| summarize arg_max(LastModifiedTime,*) by tostring(IncidentNumber)\\r\\n| extend Alerts = extract(\\\"\\\\\\\\[(.*?)\\\\\\\\]\\\", 1, tostring(AlertIds))\\r\\n| mv-expand AlertIds to typeof(string)\\r\\n| join \\r\\n(\\r\\n SecurityAlert\\r\\n | extend Remediation_ = parse_json(RemediationSteps)\\r\\n | mv-expand Remediation_\\r\\n) on $left.AlertIds == $right.SystemAlertId\\r\\n| summarize Remediation=make_set(tostring(Remediation_)) by IncidentNumber, Title, 
Severity\\r\\n| mv-expand Remediation to typeof(string)\\r\\n// extract URL from the string \\r\\n| extend url_ = iif(Remediation contains 'https://',extract (\\\"https://([a-zA-Z0-9-_://@.?%=&# +]*)\\\",0,tostring(Remediation)),\\\"\\\")\\r\\n| serialize\\r\\n| extend IncidentNumber = iif(prev(IncidentNumber) == IncidentNumber,'',IncidentNumber), Title = iif(prev(Title) == Title,'',Title)\\r\\n\",\"size\":1,\"title\":\"Incident and Remediations - Basic View (from Alert) \",\"timeContext\":{\"durationMs\":2592000000},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"crossComponentResources\":[\"{Workspace}\"],\"visualization\":\"table\",\"gridSettings\":{\"formatters\":[{\"columnMatch\":\"Severity\",\"formatter\":18,\"formatOptions\":{\"thresholdsOptions\":\"colors\",\"thresholdsGrid\":[{\"operator\":\"==\",\"thresholdValue\":\"High\",\"representation\":\"redBright\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Medium\",\"representation\":\"orange\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Low\",\"representation\":\"green\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Informational\",\"representation\":\"gray\",\"text\":\"{0}{1}\"},{\"operator\":\"Default\",\"representation\":\"blue\",\"text\":\"{0}{1}\"}]}},{\"columnMatch\":\"Remediation\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"CellDetails\",\"linkIsContextBlade\":true},\"tooltipFormat\":{\"tooltip\":\"Click to see more details about the Remediation step\"}},{\"columnMatch\":\"url_\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\",\"linkLabel\":\"\",\"linkIsContextBlade\":false},\"tooltipFormat\":{\"tooltip\":\"Open this link (in another Tab)\"}},{\"columnMatch\":\"entityList\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"CellDetails\",\"linkIsContextBlade\":true}}],\"labelSettings\":[{\"columnId\":\"url_\",\"label\":\"URL\",\"comment\":\"Show a URL if 
available \"}]},\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"IncidentNumber\"},\"subtitleContent\":{\"columnMatch\":\"Title\"},\"leftContent\":{\"columnMatch\":\"Remediation\"},\"secondaryContent\":{\"columnMatch\":\"url_\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\",\"linkIsContextBlade\":false}},\"showBorder\":false},\"graphSettings\":{\"type\":2,\"topContent\":{\"columnMatch\":\"IncidentNumber\"},\"leftContent\":{\"columnMatch\":\"Title\"},\"centerContent\":{\"columnMatch\":\"url_\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\"}},\"hivesContent\":{\"columnMatch\":\"Title\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\"}},\"nodeIdField\":\"Remediation\",\"sourceIdField\":\"Title\",\"targetIdField\":\"Remediation\",\"graphOrientation\":3,\"showOrientationToggles\":false,\"staticNodeSize\":100,\"colorSettings\":{\"nodeColorField\":\"url_\",\"type\":1,\"colorPalette\":\"default\"},\"groupByField\":\"Title\",\"hivesMargin\":5}},\"conditionalVisibility\":{\"parameterName\":\"watchListExists\",\"comparison\":\"isEqualTo\"},\"name\":\"query - basic View \"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"_GetWatchlist('SocRA')\\r\\n| join\\r\\n (\\r\\n SecurityIncident | where IncidentNumber == '{IncidentNumber}' \\r\\n | summarize arg_max(TimeGenerated, CreatedTime, Status, Severity, Owner, AdditionalData, IncidentUrl, Comments, Classification, ClassificationReason, ClassificationComment, Labels, Title, AlertIds) by IncidentNumber\\r\\n ) on $left.Alert == $right.Title\\r\\n| project-keep A*, Status, Severity //, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19\\r\\n| project-reorder Alert, Status, Severity, A* asc\\r\\n| project-away AlertIds, AdditionalData\\r\\n| evaluate narrow()\\r\\n| extend url_ = iif(Value contains 'https://',extract (\\\"https://([a-zA-Z0-9-_://@.?%=&# +]*)\\\",0,Value),\\\"\\\")\\r\\n| extend r = iif(Column startswith 'A', 
extract(@\\\"\\\\d+\\\",0,tostring(Column)),\\\"\\\")\\r\\n| where isnotempty(Value)\\r\\n| project tostring(Column), RemediationStep =Value, URLtoOpen=url_,toint(r)\\r\\n| order by Column desc, r asc \\r\\n\\r\\n\\r\\n\\r\\n\\r\\n\\r\\n\\r\\n\",\"size\":0,\"title\":\"Incident and Remediations - Advanced View (from Watchlist, \\\"SocRA\\\") Incident Number:{IncidentNumber}\",\"timeContext\":{\"durationMs\":2592000000},\"timeContextFromParameter\":\"TimeRange\",\"showExportToExcel\":true,\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"visualization\":\"table\",\"gridSettings\":{\"formatters\":[{\"columnMatch\":\"Remediation\",\"formatter\":18,\"formatOptions\":{\"linkTarget\":\"CellDetails\",\"linkIsContextBlade\":true,\"thresholdsOptions\":\"colors\",\"thresholdsGrid\":[{\"operator\":\"==\",\"thresholdValue\":\"Low\",\"representation\":\"green\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Medium\",\"representation\":\"orange\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"High\",\"representation\":\"redBright\",\"text\":\"{0}{1}\"},{\"operator\":\"Default\",\"text\":\"{0}{1}\"}]},\"tooltipFormat\":{\"tooltip\":\"Click to see more details about the Remediation 
step\"}},{\"columnMatch\":\"URLtoOpen\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\"}},{\"columnMatch\":\"Severity\",\"formatter\":18,\"formatOptions\":{\"thresholdsOptions\":\"colors\",\"thresholdsGrid\":[{\"operator\":\"==\",\"thresholdValue\":\"High\",\"representation\":\"redBright\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Medium\",\"representation\":\"orange\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Low\",\"representation\":\"green\",\"text\":\"{0}{1}\"},{\"operator\":\"==\",\"thresholdValue\":\"Informational\",\"representation\":\"gray\",\"text\":\"{0}{1}\"},{\"operator\":\"Default\",\"representation\":\"blue\",\"text\":\"{0}{1}\"}]}},{\"columnMatch\":\"url_\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\",\"linkLabel\":\"\",\"linkIsContextBlade\":false},\"tooltipFormat\":{\"tooltip\":\"Open this link (in another Tab)\"}},{\"columnMatch\":\"entityList\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"CellDetails\",\"linkIsContextBlade\":true}}],\"filter\":true,\"sortBy\":[{\"itemKey\":\"r\",\"sortOrder\":1}],\"labelSettings\":[{\"columnId\":\"r\",\"label\":\"AlertOrder\"}]},\"sortBy\":[{\"itemKey\":\"r\",\"sortOrder\":1}],\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"IncidentNumber\"},\"subtitleContent\":{\"columnMatch\":\"Title\"},\"leftContent\":{\"columnMatch\":\"Remediation\"},\"secondaryContent\":{\"columnMatch\":\"url_\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\",\"linkIsContextBlade\":false}},\"showBorder\":false},\"graphSettings\":{\"type\":2,\"topContent\":{\"columnMatch\":\"IncidentNumber\"},\"leftContent\":{\"columnMatch\":\"Title\"},\"centerContent\":{\"columnMatch\":\"url_\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\"}},\"hivesContent\":{\"columnMatch\":\"Title\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\"}},\"nodeIdField\":\"Remediation\",\"sourceIdField\":\"Title\",\"targetIdField\":\"Remediation\",\"graphOrientation\":3,\"
showOrientationToggles\":false,\"staticNodeSize\":100,\"colorSettings\":{\"nodeColorField\":\"url_\",\"type\":1,\"colorPalette\":\"default\"},\"groupByField\":\"Title\",\"hivesMargin\":5}},\"conditionalVisibility\":{\"parameterName\":\"watchListExists\",\"comparison\":\"isNotEqualTo\"},\"name\":\"query - advanced View\"}]},\"name\":\"RecActions\"},{\"type\":1,\"content\":{\"json\":\"## Incident Entities\"},\"name\":\"text - 2 - Copy - Copy - Copy - Copy - Copy\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"{\\\"version\\\":\\\"ARMEndpoint/1.0\\\",\\\"headers\\\":[],\\\"method\\\":\\\"POST\\\",\\\"path\\\":\\\"/subscriptions/{Subscription:id}/resourceGroups/{resourceGroup}/providers/Microsoft.OperationalInsights/workspaces/{Workspace:name}/providers/Microsoft.SecurityInsights/incidents/{IncidentID}/entities\\\",\\\"urlParams\\\":[{\\\"key\\\":\\\"api-version\\\",\\\"value\\\":\\\"2021-04-01\\\"}],\\\"batchDisabled\\\":false,\\\"transformers\\\":[{\\\"type\\\":\\\"jsonpath\\\",\\\"settings\\\":{\\\"tablePath\\\":\\\"$.metaData\\\",\\\"columns\\\":[]}}]}\\r\\n\",\"size\":2,\"noDataMessage\":\"No entities were 
found\",\"noDataMessageStyle\":4,\"queryType\":12,\"visualization\":\"piechart\",\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"entityKind\",\"formatter\":12,\"formatOptions\":{\"palette\":\"blue\"}},\"leftContent\":{\"columnMatch\":\"count\",\"formatter\":1,\"numberFormat\":{\"unit\":0,\"options\":{\"style\":\"decimal\"}}},\"showBorder\":false,\"sortCriteriaField\":\"Order\",\"sortOrderField\":1,\"size\":\"auto\"}},\"customWidth\":\"30\",\"name\":\"Entities\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"{\\\"version\\\":\\\"ARMEndpoint/1.0\\\",\\\"headers\\\":[],\\\"method\\\":\\\"POST\\\",\\\"path\\\":\\\"/subscriptions/{Subscription:id}/resourceGroups/{resourceGroup}/providers/Microsoft.OperationalInsights/workspaces/{Workspace:name}/providers/Microsoft.SecurityInsights/incidents/{IncidentID}/entities\\\",\\\"urlParams\\\":[{\\\"key\\\":\\\"api-version\\\",\\\"value\\\":\\\"2021-04-01\\\"}],\\\"batchDisabled\\\":false,\\\"transformers\\\":[{\\\"type\\\":\\\"jsonpath\\\",\\\"settings\\\":{\\\"tablePath\\\":\\\"$.entities\\\",\\\"columns\\\":[{\\\"path\\\":\\\"$.kind\\\",\\\"columnid\\\":\\\"Kind\\\"},{\\\"path\\\":\\\"$.properties.friendlyName\\\",\\\"columnid\\\":\\\"Name\\\"}]}}]}\\r\\n\",\"size\":2,\"noDataMessage\":\"No entities were found\",\"noDataMessageStyle\":4,\"queryType\":12,\"visualization\":\"table\",\"gridSettings\":{\"hierarchySettings\":{\"treeType\":1,\"groupBy\":[\"Kind\"],\"expandTopLevel\":true}},\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"kind\",\"formatter\":1,\"numberFormat\":{\"unit\":0,\"options\":{\"style\":\"decimal\"}}},\"subtitleContent\":{\"columnMatch\":\"properties\",\"formatter\":1},\"showBorder\":false,\"sortCriteriaField\":\"kind\",\"sortOrderField\":1,\"size\":\"auto\"}},\"customWidth\":\"70\",\"name\":\"Entities List\"},{\"type\":12,\"content\":{\"version\":\"NotebookGroup/1.0\",\"groupType\":\"editable\",\"items\":[{\"type\":1,\"content\":{\"json\":\"## Recent 
activities\"},\"name\":\"text - 2 - Copy - Copy - Copy - Copy\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"SecurityIncident\\n| where Severity in ({Severity}) or '{Severity:label}' == \\\"All\\\"\\n| extend Tactics = todynamic(AdditionalData.tactics)\\n| where Tactics in ({Tactics}) or '{Tactics:label}' == \\\"All\\\"\\n| extend Owner = todynamic(Owner.assignedTo) \\n| where Owner in ({Owner}) or '{Owner:label}' == \\\"All\\\"\\n| extend Product = todynamic((parse_json(tostring(AdditionalData.alertProductNames))[0])) \\n| where Product in ({Product}) or '{Product:label}' == \\\"All\\\"\\n| where IncidentNumber == '{IncidentNumber}' or '{IncidentNumber}' == ''\\n| order by LastModifiedTime \\n| project LastModifiedTime,IncidentNumber, Title, Product, IncidentUrl, ModifiedBy,Status, Severity, Owner\\n| take 250\\n\\n\\n\",\"size\":1,\"timeContext\":{\"durationMs\":2592000000},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"crossComponentResources\":[\"{Workspace}\"],\"visualization\":\"table\",\"gridSettings\":{\"formatters\":[{\"columnMatch\":\"IncidentUrl\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\",\"linkLabel\":\"Go to incident >\"}}],\"labelSettings\":[{\"columnId\":\"LastModifiedTime\",\"label\":\"Last Modified Time\"},{\"columnId\":\"IncidentNumber\",\"label\":\"Incident Number\"},{\"columnId\":\"IncidentUrl\",\"label\":\"Link to incident\"},{\"columnId\":\"ModifiedBy\",\"label\":\"Modified By\"}]},\"tileSettings\":{\"showBorder\":false,\"titleContent\":{\"columnMatch\":\"Column1\",\"formatter\":1},\"leftContent\":{\"columnMatch\":\"IncidentNumber\",\"formatter\":12,\"formatOptions\":{\"palette\":\"auto\"},\"numberFormat\":{\"unit\":17,\"options\":{\"maximumSignificantDigits\":3,\"maximumFractionDigits\":2}}}}},\"name\":\"query - 2 - Copy - Copy - Copy - Copy\"}]},\"name\":\"Incidents tactic over 
time\"},{\"type\":12,\"content\":{\"version\":\"NotebookGroup/1.0\",\"groupType\":\"editable\",\"items\":[{\"type\":1,\"content\":{\"json\":\"## Incident's Comments\"},\"name\":\"text - 2 - Copy - Copy - Copy - Copy\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"SecurityIncident\\n| where IncidentNumber == '{IncidentNumber}' or '{IncidentNumber}' == ''\\n| summarize arg_max(TimeGenerated,Status, Severity, Owner, AdditionalData, IncidentUrl, Comments) by IncidentNumber\\n| where Severity in ({Severity}) or '{Severity:label}' == \\\"All\\\"\\n| extend Tactics = todynamic(AdditionalData.tactics)\\n| where Tactics in ({Tactics}) or '{Tactics:label}' == \\\"All\\\"\\n| extend Owner = todynamic(Owner.assignedTo) \\n| where Owner in ({Owner}) or '{Owner:label}' == \\\"All\\\"\\n| extend Product = todynamic((parse_json(tostring(AdditionalData.alertProductNames))[0])) \\n| where Product in ({Product}) or '{Product:label}' == \\\"All\\\"\\n| mv-expand Comments to typeof(string)\\n| extend Message = extract('message\\\":\\\"(.*?)\\\"',1,tostring(Comments)), Author = extract('name\\\":\\\"(.*?)\\\"',1,tostring(Comments)), CreatedTimeUTC = extract('createdTimeUtc\\\":\\\"(.*?)\\\"',1,tostring(Comments))\\n| project CreatedTimeUTC, Author, Message, IncidentNumber, Owner\\n| take 250\\n\\n\\n\",\"size\":1,\"timeContext\":{\"durationMs\":2592000000},\"timeContextFromParameter\":\"TimeRange\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"crossComponentResources\":[\"{Workspace}\"],\"visualization\":\"table\",\"gridSettings\":{\"formatters\":[{\"columnMatch\":\"IncidentUrl\",\"formatter\":7,\"formatOptions\":{\"linkTarget\":\"Url\",\"linkLabel\":\"Go to incident 
>\"}}],\"sortBy\":[{\"itemKey\":\"IncidentNumber\",\"sortOrder\":2}]},\"sortBy\":[{\"itemKey\":\"IncidentNumber\",\"sortOrder\":2}],\"tileSettings\":{\"showBorder\":false,\"titleContent\":{\"columnMatch\":\"Column1\",\"formatter\":1},\"leftContent\":{\"columnMatch\":\"IncidentNumber\",\"formatter\":12,\"formatOptions\":{\"palette\":\"auto\"},\"numberFormat\":{\"unit\":17,\"options\":{\"maximumSignificantDigits\":3,\"maximumFractionDigits\":2}}}}},\"name\":\"query - 2 - Copy - Copy - Copy - Copy\"}]},\"name\":\"Incidents tactic over time - Copy\"},{\"type\":12,\"content\":{\"version\":\"NotebookGroup/1.0\",\"groupType\":\"editable\",\"items\":[{\"type\":1,\"content\":{\"json\":\"## Time to closure\\r\\n\"},\"name\":\"text - 2 - Copy\"},{\"type\":1,\"content\":{\"json\":\"The mean time between the incident creation and first modification by owner\\r\\n\\r\\n\",\"style\":\"info\"},\"conditionalVisibility\":{\"parameterName\":\"Help\",\"comparison\":\"isEqualTo\",\"value\":\"Yes\"},\"name\":\"text - 2 - Copy - Copy\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"SecurityIncident\\n| where IncidentNumber == '{IncidentNumber}' or '{IncidentNumber}' == ''\\n| where CreatedTime >= {TimeRange:start} and CreatedTime <= {TimeRange:end}\\n| where Severity in ({Severity}) or '{Severity:label}' == \\\"All\\\"\\n| extend Tactics = todynamic(AdditionalData.tactics)\\n| where Tactics in ({Tactics}) or '{Tactics:label}' == \\\"All\\\"\\n| extend Owner = todynamic(Owner.assignedTo) \\n| where Owner in ({Owner}) or '{Owner:label}' == \\\"All\\\"\\n| extend Product = todynamic((parse_json(tostring(AdditionalData.alertProductNames))[0])) \\n| where Product in ({Product}) or '{Product:label}' == \\\"All\\\"\\n| summarize arg_max(TimeGenerated,Title, ClosedTime, CreatedTime) by IncidentNumber \\n| where isnotnull(ClosedTime)\\n| extend TimeToClosure = (ClosedTime - 
CreatedTime)/1h\\n\",\"size\":1,\"timeContext\":{\"durationMs\":2592000000},\"timeContextFromParameter\":\"TimeRange\",\"exportFieldName\":\"series\",\"exportParameterName\":\"Status\",\"exportDefaultValue\":\"All\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"crossComponentResources\":[\"{Workspace}\"],\"visualization\":\"tiles\",\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"IncidentNumber\",\"formatter\":1},\"leftContent\":{\"columnMatch\":\"TimeToClosure\",\"formatter\":12,\"formatOptions\":{\"palette\":\"auto\"},\"numberFormat\":{\"unit\":26,\"options\":{\"style\":\"decimal\",\"useGrouping\":false,\"maximumFractionDigits\":3}}},\"showBorder\":false}},\"name\":\"query - 2 - Copy\"}]},\"name\":\"Time to mitigate\",\"styleSettings\":{\"margin\":\"0\",\"padding\":\"0\"}},{\"type\":12,\"content\":{\"version\":\"NotebookGroup/1.0\",\"groupType\":\"editable\",\"items\":[{\"type\":1,\"content\":{\"json\":\"## Time to triage \\r\\n\"},\"name\":\"text - 2 - Copy\"},{\"type\":1,\"content\":{\"json\":\"The mean time between the incident creation and first modification by owner\\r\\n\\r\\n\",\"style\":\"info\"},\"conditionalVisibility\":{\"parameterName\":\"Help\",\"comparison\":\"isEqualTo\",\"value\":\"Yes\"},\"name\":\"text - 2 - Copy - Copy\"},{\"type\":3,\"content\":{\"version\":\"KqlItem/1.0\",\"query\":\"SecurityIncident\\n| where IncidentNumber == '{IncidentNumber}' or '{IncidentNumber}' == ''\\n| where CreatedTime >= {TimeRange:start} and CreatedTime <= {TimeRange:end}\\n| where Severity in ({Severity}) or '{Severity:label}' == \\\"All\\\"\\n| extend Tactics = todynamic(AdditionalData.tactics)\\n| where Tactics in ({Tactics}) or '{Tactics:label}' == \\\"All\\\"\\n| extend Owner = todynamic(Owner.assignedTo) \\n| where Owner in ({Owner}) or '{Owner:label}' == \\\"All\\\"\\n| extend Product = todynamic((parse_json(tostring(AdditionalData.alertProductNames))[0])) \\n| where Product in ({Product}) or '{Product:label}' == 
\\\"All\\\"\\n| where ModifiedBy != 'Incident created from alert'\\n| summarize arg_max(LastModifiedTime,*) by IncidentNumber \\n| where isnotnull(FirstModifiedTime)\\n| extend TimeToTriage = FirstModifiedTime - CreatedTime\\n| project IncidentNumber, MeanToTriage = TimeToTriage/1h\\n\",\"size\":1,\"timeContext\":{\"durationMs\":94608000000,\"endTime\":\"2023-06-01T17:13:00Z\"},\"exportFieldName\":\"series\",\"exportParameterName\":\"Status\",\"exportDefaultValue\":\"All\",\"queryType\":0,\"resourceType\":\"microsoft.operationalinsights/workspaces\",\"crossComponentResources\":[\"{Workspace}\"],\"visualization\":\"tiles\",\"tileSettings\":{\"titleContent\":{\"columnMatch\":\"IncidentNumber\",\"formatter\":1},\"leftContent\":{\"columnMatch\":\"MeanToTriage\",\"formatter\":12,\"formatOptions\":{\"palette\":\"auto\"},\"numberFormat\":{\"unit\":26,\"options\":{\"style\":\"decimal\",\"useGrouping\":false,\"maximumFractionDigits\":3}}},\"showBorder\":false}},\"name\":\"query - 2 - Copy\"}]},\"name\":\"Time to close\",\"styleSettings\":{\"margin\":\"0\",\"padding\":\"0\"}}],\"fromTemplateId\":\"sentinel-IncidentOverview\",\"$schema\":\"https://github.com/Microsoft/Application-Insights-Workbooks/blob/master/schema/workbook.json\"}\r\n", "version": "1.0", "sourceId": "[variables('workspaceResourceId')]", "category": "sentinel" @@ -7462,7 +8484,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "SecurityOperationsEfficiency Workbook with template version 3.0.3", + "description": "SecurityOperationsEfficiency Workbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('workbookVersion3')]", @@ -7491,7 +8513,7 @@ "apiVersion": "2022-01-01-preview", "name": 
"[concat(parameters('workspace'),'/Microsoft.SecurityInsights/',concat('Workbook-', last(split(variables('workbookId3'),'/'))))]", "properties": { - "description": "@{workbookKey=SecurityOperationsEfficiency; logoFileName=Azure_Sentinel.svg; description=Security operations center managers can view overall efficiency metrics and measures regarding the performance of their team. They can find operations by multiple indicators over time including severity, MITRE tactics, mean time to triage, mean time to resolve and more. The SOC manager can develop a picture of the performance in both general and specific areas over time and use it to improve efficiency.; dataTypesDependencies=System.Object[]; dataConnectorsDependencies=System.Object[]; previewImagesFileNames=System.Object[]; version=1.5.1; title=Security Operations Efficiency; templateRelativePath=SecurityOperationsEfficiency.json; subtitle=; provider=Microsoft}.description", + "description": "@{workbookKey=SecurityOperationsEfficiency; logoFileName=Azure_Sentinel.svg; description=Security operations center managers can view overall efficiency metrics and measures regarding the performance of their team. They can find operations by multiple indicators over time including severity, MITRE tactics, mean time to triage, mean time to resolve and more. 
The SOC manager can develop a picture of the performance in both general and specific areas over time and use it to improve efficiency.; dataTypesDependencies=System.Object[]; dataConnectorsDependencies=System.Object[]; previewImagesFileNames=System.Object[]; version=1.5.2; title=Security Operations Efficiency; templateRelativePath=SecurityOperationsEfficiency.json; subtitle=; provider=Microsoft}.description", "parentId": "[variables('workbookId3')]", "contentId": "[variables('_workbookContentId3')]", "kind": "Workbook", @@ -7550,7 +8572,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "IncidentTasksWorkbook Workbook with template version 3.0.3", + "description": "IncidentTasksWorkbook Workbook with template version 3.0.4", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('workbookVersion4')]", @@ -7621,12 +8643,12 @@ "apiVersion": "2023-04-01-preview", "location": "[parameters('workspace-location')]", "properties": { - "version": "3.0.3", + "version": "3.0.4", "kind": "Solution", "contentSchemaVersion": "3.0.0", "displayName": "SentinelSOARessentials", "publisherDisplayName": "Microsoft Sentinel, Microsoft Corporation", - "descriptionHtml": "

Note: Please refer to the following before installing the solution:

\n

• Review the solution Release Notes

\n

• There may be known issues pertaining to this Solution, please refer to them before installing.

\n

The Microsoft Sentinel SOAR Essentials solution for Microsoft Sentinel contains Playbooks that can help you get started with basic notification and orchestration scenarios for common use cases. These include Playbooks for sending notifications over email and/or collaboration platforms such as MS Teams, Slack, etc.

\n

Workbooks: 4, Playbooks: 18

\n

Learn more about Microsoft Sentinel | Learn more about Solutions

\n", + "descriptionHtml": "

Note: Please refer to the following before installing the solution:

\n

• Review the solution Release Notes

\n

• There may be known issues pertaining to this Solution, please refer to them before installing.

\n

The Microsoft Sentinel SOAR Essentials solution for Microsoft Sentinel contains Playbooks that can help you get started with basic notification and orchestration scenarios for common use cases. These include Playbooks for sending notifications over email and/or collaboration platforms such as MS Teams, Slack, etc.

\n

Workbooks: 4, Playbooks: 21

\n

Learn more about Microsoft Sentinel | Learn more about Solutions

\n", "contentKind": "Solution", "contentProductId": "[variables('_solutioncontentProductId')]", "id": "[variables('_solutioncontentProductId')]", @@ -7741,6 +8763,21 @@ "contentId": "[variables('_Send-Teams-adaptive-card-on-incident-creation')]", "version": "[variables('playbookVersion18')]" }, + { + "kind": "Playbook", + "contentId": "[variables('_Http-Trigger-Entity-Analyzer')]", + "version": "[variables('playbookVersion19')]" + }, + { + "kind": "Playbook", + "contentId": "[variables('_Incident-Trigger-Entity-Analyzer')]", + "version": "[variables('playbookVersion20')]" + }, + { + "kind": "Playbook", + "contentId": "[variables('_Url-Trigger-Entity-Analyzer')]", + "version": "[variables('playbookVersion21')]" + }, { "kind": "Workbook", "contentId": "[variables('_workbookContentId1')]", diff --git a/Solutions/SentinelSOARessentials/Playbooks/Http-Trigger-Entity-Analyzer/azuredeploy.json b/Solutions/SentinelSOARessentials/Playbooks/Http-Trigger-Entity-Analyzer/azuredeploy.json new file mode 100644 index 00000000000..a43b1c6a9fc --- /dev/null +++ b/Solutions/SentinelSOARessentials/Playbooks/Http-Trigger-Entity-Analyzer/azuredeploy.json @@ -0,0 +1,329 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "title": "HTTP Trigger Entity Analyzer", + "description": "This playbook is triggered by HTTP POST requests with entity information and performs automated investigation and enrichment of URL and User entities with asynchronous processing.", + "prerequisites": [ + "1. The user deploying this Logic App needs to have a Contributor Role.", + "2. The user has permissions to access Microsoft Sentinel workspace.", + "3. Microsoft Sentinel data connector lake must be enabled in your workspace for entity data collection.", + "4. The SentinelMCP connector is available in your environment.", + "5. You need to provide a valid Sentinel workspace ID during deployment.", + "6. 
Authentication support for the Entity Analyzer connection includes Entra ID Authentication (OAuth), Service Principal (Application ID and Secret), or Managed Identity (System-assigned or User-assigned)." + ], + "postDeployment": [ + "1. Authenticate the connections: Go to the Logic App → API connections and authenticate SentinelMCP connection with Microsoft Sentinel MCP permissions.", + "2. Authenticate the Entity Analyzer connection using one of the supported methods: Entra ID Auth, Service Principal, or Managed Identity.", + "3. Get the HTTP endpoint URL: Open the Logic App → Go to Logic app designer → Click on the HTTP trigger → Copy the HTTP POST URL.", + "4. The playbook will trigger when POST requests are sent to the HTTP endpoint." + ], + "prerequisitesDeployTemplateFile": "", + "lastUpdateTime": "2025-12-07T00:00:00.000Z", + "entities": [ + "URL", + "Account" + ], + "tags": [ + "Enrichment", + "Utilities", + "Entity Analysis", + "API Integration" + ], + "support": { + "tier": "community" + }, + "author": { + "name": "yaniv shasha" + } + }, + "parameters": { + "PlaybookName": { + "defaultValue": "Http-Trigger-Entity-Analyzer", + "type": "string", + "metadata": { + "description": "Name of the Logic App playbook" + } + }, + "workspaceId": { + "type": "string", + "metadata": { + "description": "Microsoft Sentinel workspace ID" + } + }, + "lookBackDays": { + "defaultValue": 40, + "type": "int", + "metadata": { + "description": "Number of days to look back for entity analysis" + } + } + }, + "variables": { + "SentinelMCPConnectionName": "[concat('SentinelMCP-', parameters('PlaybookName'))]" + }, + "resources": [ + { + "type": "Microsoft.Web/connections", + "apiVersion": "2016-06-01", + "name": "[variables('SentinelMCPConnectionName')]", + "location": "[resourceGroup().location]", + "kind": "V1", + "properties": { + "displayName": "[variables('SentinelMCPConnectionName')]", + "customParameterValues": {}, + "api": { + "id": "[concat('/subscriptions/', 
subscription().subscriptionId, '/providers/Microsoft.Web/locations/', resourceGroup().location, '/managedApis/sentinelmcp')]" + } + } + }, + { + "type": "Microsoft.Logic/workflows", + "apiVersion": "2017-07-01", + "name": "[parameters('PlaybookName')]", + "location": "[resourceGroup().location]", + "tags": { + "Created By": "ARM Template" + }, + "dependsOn": [ + "[resourceId('Microsoft.Web/connections', variables('SentinelMCPConnectionName'))]" + ], + "properties": { + "state": "Enabled", + "definition": { + "$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "$connections": { + "defaultValue": {}, + "type": "Object" + } + }, + "triggers": { + "When_an_HTTP_request_is_received": { + "type": "Request", + "kind": "Http", + "inputs": { + "schema": { + "type": "object", + "properties": { + "Type": { + "type": "string" + }, + "Value": { + "type": "string" + } + } + } + } + } + }, + "actions": { + "Response_Accepted": { + "runAfter": {}, + "type": "Response", + "kind": "Http", + "inputs": { + "statusCode": 202, + "headers": { + "Retry-After": "10" + }, + "body": { + "status": "Accepted", + "message": "Entity analysis started. 
Processing in background.", + "runId": "@{workflow().run.name}", + "entityType": "@{triggerBody()?['Type']}", + "entityValue": "@{triggerBody()?['Value']}" + } + } + }, + "Condition": { + "actions": { + "User_Analyzer": { + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['sentinelmcp']['connectionId']" + } + }, + "method": "post", + "body": { + "workspaceId": "[parameters('workspaceId')]", + "lookBackDays": "[parameters('lookBackDays')]", + "properties": { + "entityType": "User", + "userId": "@{triggerBody()?['Value']}" + } + }, + "path": "/aiprimitives/analysis", + "queries": { + "api-version": "2025-08-01-preview" + } + } + }, + "Parse_JSON": { + "runAfter": { + "User_Analyzer": [ + "Succeeded" + ] + }, + "type": "ParseJson", + "inputs": { + "content": "@body('User_Analyzer')", + "schema": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "status": { + "type": "string" + }, + "classification": { + "type": "string" + }, + "analysis": { + "type": "string" + }, + "recommendation": { + "type": "string" + }, + "disclaimer": { + "type": "string" + }, + "dataSourceList": { + "type": "array", + "items": { + "type": "string" + } + }, + "properties": { + "type": "object", + "properties": { + "entityType": { + "type": "string" + } + } + } + } + } + } + } + }, + "runAfter": { + "Response_Accepted": [ + "Succeeded" + ] + }, + "else": { + "actions": { + "Url_Analyzer": { + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['sentinelmcp']['connectionId']" + } + }, + "method": "post", + "body": { + "workspaceId": "[parameters('workspaceId')]", + "lookBackDays": "[parameters('lookBackDays')]", + "properties": { + "entityType": "@{triggerBody()?['Type']}", + "url": "@{triggerBody()?['Value']}" + } + }, + "path": "/aiprimitives/analysis", + "queries": { + "api-version": "2025-08-01-preview" + } + } + }, + "Parse_JSON_1": { + "runAfter": { + 
"Url_Analyzer": [ + "Succeeded" + ] + }, + "type": "ParseJson", + "inputs": { + "content": "@body('Url_Analyzer')", + "schema": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "status": { + "type": "string" + }, + "classification": { + "type": "string" + }, + "analysis": { + "type": "string" + }, + "recommendation": { + "type": "string" + }, + "disclaimer": { + "type": "string" + }, + "dataSourceList": { + "type": "array", + "items": { + "type": "string" + } + }, + "properties": { + "type": "object", + "properties": { + "entityType": { + "type": "string" + } + } + } + } + } + } + } + } + }, + "expression": { + "and": [ + { + "equals": [ + "@triggerBody()?['Type']", + "User" + ] + } + ] + }, + "type": "If" + } + }, + "outputs": {} + }, + "parameters": { + "$connections": { + "value": { + "sentinelmcp": { + "connectionId": "[resourceId('Microsoft.Web/connections', variables('SentinelMCPConnectionName'))]", + "connectionName": "[variables('SentinelMCPConnectionName')]", + "id": "[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', resourceGroup().location, '/managedApis/sentinelmcp')]" + } + } + } + } + } + } + ], + "outputs": { + "logicAppUrl": { + "type": "string", + "value": "[listCallbackURL(concat(resourceId('Microsoft.Logic/workflows', parameters('PlaybookName')), '/triggers/When_an_HTTP_request_is_received'), '2017-07-01').value]" + } + } +} \ No newline at end of file diff --git a/Solutions/SentinelSOARessentials/Playbooks/Http-Trigger-Entity-Analyzer/images/deployment.png b/Solutions/SentinelSOARessentials/Playbooks/Http-Trigger-Entity-Analyzer/images/deployment.png new file mode 100644 index 00000000000..b64a44d486b Binary files /dev/null and b/Solutions/SentinelSOARessentials/Playbooks/Http-Trigger-Entity-Analyzer/images/deployment.png differ diff --git a/Solutions/SentinelSOARessentials/Playbooks/Http-Trigger-Entity-Analyzer/images/logicapp_dis.png 
b/Solutions/SentinelSOARessentials/Playbooks/Http-Trigger-Entity-Analyzer/images/logicapp_dis.png new file mode 100644 index 00000000000..71008cae7ca Binary files /dev/null and b/Solutions/SentinelSOARessentials/Playbooks/Http-Trigger-Entity-Analyzer/images/logicapp_dis.png differ diff --git a/Solutions/SentinelSOARessentials/Playbooks/Http-Trigger-Entity-Analyzer/readme.md b/Solutions/SentinelSOARessentials/Playbooks/Http-Trigger-Entity-Analyzer/readme.md new file mode 100644 index 00000000000..3109c3221a5 --- /dev/null +++ b/Solutions/SentinelSOARessentials/Playbooks/Http-Trigger-Entity-Analyzer/readme.md @@ -0,0 +1,277 @@ +# Entity Analyzer - HTTP Trigger Playbook + +Activating the 'Deploy' button initiates the deployment of an Azure Logic App integrated with Microsoft Sentinel MCP Actions, utilizing an HTTP request trigger. +The Logic App is configured to run when an HTTP POST request is received with entity information. This Logic App automatically analyzes URL and User entities and provides detailed security insights including classification, analysis results, and recommendations for each entity type. 
+ +The playbook automatically triggers when: +- An HTTP POST request is received with entity information +- External systems need to analyze URLs or User accounts +- Integration with custom applications or workflows is required +- On-demand entity analysis is needed via API calls + +After the analysis is complete, the MCP Entity Analyzer conducts a comprehensive investigation of the entity and returns results asynchronously: +- **Initial Response (202 Accepted)**: Immediate acknowledgment with runId for tracking +- **Background Processing**: Analysis continues without timeout constraints +- **URL Analysis**: Security classification, threat intelligence, and URL reputation analysis +- **User Analysis**: Behavioral analysis, risk assessment, and user activity patterns +- **Classification**: Security classification for each entity +- **Analysis Results**: Detailed security analysis findings +- **Recommendations**: Security recommendations based on the analysis +- **Disclaimer**: AI-generated analysis disclaimer + +### Prerequisites + +Prior to beginning the installation process, it's crucial to confirm that you have met the following prerequisites: +- The user deploying this Logic App needs to have a **Contributor Role** +- The user has permissions to access **Microsoft Sentinel** workspace +- **Microsoft Sentinel data connector lake must be enabled** in your workspace for entity data collection +- The **SentinelMCP connector** is available in your environment +- You need to provide a valid **Sentinel workspace ID** during deployment +- Authentication support for the Entity Analyzer connection includes: + - **Entra ID Authentication** (OAuth) + - **Service Principal** (Application ID and Secret) + - **Managed Identity** (System-assigned or User-assigned) + +### Deployment Files + +This playbook includes two deployment files: + +#### 1. 
azuredeploy.json +The main ARM template file that defines the Azure resources to be deployed: +- **Logic App Workflow**: The main playbook with HTTP trigger and conditional logic +- **API Connection**: SentinelMCP connection for Microsoft Sentinel MCP integration +- **Workflow Definition**: Complete Logic App structure with triggers, actions, and conditions + +#### 2. azuredeploy.parameters.json +The parameters file that contains the configuration values for deployment. This file should be customized before deployment: + +**Parameters explained:** +- **PlaybookName**: + - **Description**: The name that will be assigned to your Logic App in Azure + - **Default Value**: "Http-Trigger-Entity-Analyzer" + - **Type**: String + - **Usage**: This name will appear in your Azure Portal and be used to identify the Logic App + +- **workspaceId**: + - **Description**: Your Microsoft Sentinel workspace ID (Log Analytics Workspace ID) + - **Default Value**: `` (must be replaced) + - **Type**: String + - **Required**: Yes + - **Usage**: Used to query Sentinel data for entity analysis + - **How to find**: Azure Portal → Microsoft Sentinel → Settings → Workspace settings → Copy the Workspace ID + +- **lookBackDays**: + - **Description**: The number of days to look back when analyzing entity data + - **Default Value**: 40 + - **Type**: Integer + - **Usage**: Determines the time range for historical data analysis (e.g., user sign-in logs, URL access patterns) + - **Recommendation**: 30-90 days for comprehensive analysis, adjust based on your data retention policy + +### Parameters + +During deployment, you'll need to provide: +- **PlaybookName**: Name for the Logic App (default: "Http-Trigger-Entity-Analyzer") +- **workspaceId**: Your Azure Sentinel workspace ID (required) +- **lookBackDays**: Number of days to look back for entity analysis (default: 40 days) + +### Deployment + +**Option 1: Deploy via Azure Portal (Recommended)** + +To deploy the Entity Analyzer HTTP Trigger Logic 
App using the Deploy to Azure button: +1. Press on the Deploy button below +2. Select your subscription and resource group (use the same tenant where Microsoft Sentinel is configured) +3. Provide your Sentinel workspace ID +4. Configure the lookBackDays parameter if needed (default is 40 days) + + +[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FSentinelSOARessentials%2FPlaybooks%2FHttp-Trigger-Entity-Analyzer%2Fazuredeploy.json) +[![Deploy to Azure Gov](https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/1-CONTRIBUTION-GUIDE/images/deploytoazuregov.png)](https://portal.azure.us/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FSentinelSOARessentials%2FPlaybooks%2FHttp-Trigger-Entity-Analyzer%2Fazuredeploy.json) + +![Azure Deployment Interface](images/deployment.png) + +**Option 2: Deploy via PowerShell** + +1. Update the `azuredeploy.parameters.json` file with your values: + - Replace `` with your actual Workspace ID + - Adjust `PlaybookName` if you want a different name + - Modify `lookBackDays` if needed + +2. Run the following PowerShell commands: + +```powershell +# Connect to Azure +Connect-AzAccount + +# Set your variables +$resourceGroupName = "your-resource-group-name" +$location = "westus2" # or your preferred location + +# Deploy the template +New-AzResourceGroupDeployment ` + -ResourceGroupName $resourceGroupName ` + -TemplateFile ".\azuredeploy.json" ` + -TemplateParameterFile ".\azuredeploy.parameters.json" ` + -Verbose +``` + +**Option 3: Deploy via Azure CLI** + +1. Update the `azuredeploy.parameters.json` file with your values + +2. 
Run the following commands: + +```bash +# Login to Azure +az login + +# Deploy the template +az deployment group create \ + --resource-group your-resource-group-name \ + --template-file azuredeploy.json \ + --parameters azuredeploy.parameters.json +``` + +### Post Deployment + +After successful deployment: +- The Logic App will be automatically enabled and ready to use +- **Authenticate the connections**: Go to the Logic App → API connections and authenticate: + - **SentinelMCP connection**: Authenticate with Microsoft Sentinel MCP permissions + - **Entity Analyzer connection**: Choose one of the supported authentication methods: + - **Entra ID Auth**: Sign in with your Entra ID credentials + - **Service Principal**: Provide Application (Client) ID and Secret + - **Managed Identity**: Configure system-assigned or user-assigned managed identity +- **Get the HTTP endpoint URL**: + 1. Open the Logic App in Azure Portal + 2. Go to "Logic app designer" + 3. Click on the HTTP trigger + 4. Copy the "HTTP POST URL" - this is your API endpoint +- The playbook will trigger when POST requests are sent to the HTTP endpoint +- **View the endpoint in deployment output**: The Logic App callback URL is available in the deployment outputs + +### How It Works + +![Logic App Workflow](images/logicapp_dis.png) + +1. **Trigger**: The Logic App triggers when an HTTP POST request is received +2. **Immediate Response**: Returns a 202 Accepted response with a runId for tracking +3. **Asynchronous Processing**: + - Analysis continues in the background without timeout constraints + - **Condition**: Checks the entity type from the request + - **User Analysis** (True branch): If Type is "User", analyzes user behavior and risk + - **URL Analysis** (False branch): If Type is not "User", analyzes URL security and reputation +4. 
**Result Retrieval**: Check results via Azure Portal run history using the runId + +### API Request Format + +Send a POST request to your Logic App endpoint with the following JSON body: + +**For User Analysis:** +```json +{ + "Type": "User", + "Value": "Your User Object Id" +} +``` + +**For URL Analysis:** +```json +{ + "Type": "Url", + "Value": "https://example.com" +} +``` + +### API Response + +**Immediate Response (202 Accepted):** +```json +{ + "status": "Accepted", + "message": "Entity analysis started. Processing in background.", + "runId": "08584416635090057123230409437CU07", + "entityType": "User", + "entityValue": "59d9ec06-a7e9-49fa-aba4-f94adee23cab" +} +``` + +**Response Headers:** +- `Status Code`: 202 Accepted +- `Retry-After`: 10 seconds + +### Retrieving Results + +To view the analysis results after receiving the 202 response: + +**Option 1: Azure Portal (Recommended)** +1. Go to your Logic App in Azure Portal +2. Click **"Run history"** in the left menu +3. Find the run using the `runId` from the response +4. 
View the action outputs to see the full analysis results + +**Option 2: Azure Management API** +```powershell +az rest --method get --url "https://management.azure.com/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/workflows/{workflowName}/runs/{runId}?api-version=2016-06-01" +``` + +### Sample Analysis Output + +**URL Analysis:** +```json +{ + "id": "analysis-id", + "status": "Completed", + "classification": "Malicious", + "analysis": "The URL exhibits characteristics of a phishing site...", + "recommendation": "Block this URL and investigate affected users...", + "disclaimer": "This is an AI-generated analysis...", + "dataSourceList": ["Threat Intelligence", "URL Reputation"], + "properties": { + "entityType": "Url" + } +} +``` + +**User Analysis:** +```json +{ + "id": "analysis-id", + "status": "Completed", + "classification": "High Risk", + "analysis": "User shows abnormal login patterns...", + "recommendation": "Investigate recent user activities and consider MFA enforcement...", + "disclaimer": "This is an AI-generated analysis...", + "dataSourceList": ["Sign-in Logs", "Audit Logs"], + "properties": { + "entityType": "User" + } +} +``` + +### Use Cases + +This playbook is ideal for: +- **API Integration**: Integrate entity analysis into custom applications and workflows +- **SOAR Platforms**: Connect with third-party SOAR solutions +- **Automated Scanning**: Batch analysis of URLs from threat feeds +- **User Risk Assessment**: On-demand user behavior analysis +- **External Tools Integration**: Connect with ticketing systems, chatbots, or custom dashboards +- **Webhook Receivers**: Process entities from external security tools + + +### Performance Notes + +- The playbook returns immediately (202 Accepted) to avoid timeout issues +- Analysis processing time depends on the lookBackDays parameter and data volume +- Typical analysis completion: 2-5 minutes for comprehensive analysis +- Use the runId to track analysis 
progress in Azure Portal + + +### Security Considerations + +- **Endpoint Security**: The HTTP trigger URL contains a SAS token for authentication +- **Keep the URL secure**: Treat it like an API key - don't expose it publicly +- **Regenerate if compromised**: You can regenerate the trigger URL in the Logic App designer +- **Network restrictions**: Consider using Azure networking features to restrict access +- **Input validation**: The Logic App validates the request schema automatically \ No newline at end of file diff --git a/Solutions/SentinelSOARessentials/Playbooks/Incident-Trigger-Entity-Analyzer/azuredeploy.json b/Solutions/SentinelSOARessentials/Playbooks/Incident-Trigger-Entity-Analyzer/azuredeploy.json new file mode 100644 index 00000000000..1392b96eb6d --- /dev/null +++ b/Solutions/SentinelSOARessentials/Playbooks/Incident-Trigger-Entity-Analyzer/azuredeploy.json @@ -0,0 +1,310 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "title": "Incident Trigger Entity Analyzer", + "description": "This playbook is triggered by Microsoft Sentinel incidents and performs automated investigation and enrichment of URL and User entities associated with the incident.", + "prerequisites": [ + "1. A Microsoft Sentinel workspace must be configured.", + "2. The user deploying this Logic App needs to have a Contributor Role.", + "3. The user has permissions to access Microsoft Sentinel workspace.", + "4. The SentinelMCP connector is available in your environment." + ], + "postDeployment": [ + "1. Authenticate the connections: Go to the Logic App → API connections and authenticate Microsoft Sentinel connection with a user that has Sentinel permissions.", + "2. Authenticate the SentinelMCP connection with Microsoft Sentinel MCP permissions.", + "3. The playbook will automatically trigger when new incidents are created.", + "4. 
Consider creating an automation rule to run this playbook automatically on specific incident types." + ], + "prerequisitesDeployTemplateFile": "", + "lastUpdateTime": "2025-12-07T00:00:00.000Z", + "entities": [ + "URL", + "Account" + ], + "tags": [ + "Enrichment", + "Utilities", + "Entity Analysis" + ], + "support": { + "tier": "community" + }, + "author": { + "name": "yaniv shasha" + } + }, + "parameters": { + "PlaybookName": { + "defaultValue": "Entity-Analyzer-Incident-Trigger", + "type": "string" + }, + "lookBackDays": { + "defaultValue": 60, + "type": "int", + "metadata": { + "description": "Number of days to look back for entity analysis" + } + } + }, + "variables": { + "MicrosoftSentinelConnectionName": "[concat('MicrosoftSentinel-', parameters('PlaybookName'))]", + "SentinelMCPConnectionName": "[concat('SentinelMCP-', parameters('PlaybookName'))]" + }, + "resources": [ + { + "type": "Microsoft.Logic/workflows", + "apiVersion": "2017-07-01", + "name": "[parameters('PlaybookName')]", + "location": "[resourceGroup().location]", + "identity": { + "type": "SystemAssigned" + }, + "tags": { + "Created By": "ARM Template" + }, + "dependsOn": [ + "[resourceId('Microsoft.Web/connections', variables('MicrosoftSentinelConnectionName'))]", + "[resourceId('Microsoft.Web/connections', variables('SentinelMCPConnectionName'))]" + ], + "properties": { + "state": "Enabled", + "definition": { + "$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "$connections": { + "defaultValue": {}, + "type": "Object" + } + }, + "triggers": { + "Microsoft_Sentinel_incident": { + "type": "ApiConnectionWebhook", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['azuresentinel']['connectionId']" + } + }, + "body": { + "callback_url": "@listCallbackUrl()" + }, + "path": "/incident-creation" + } + } + }, + "actions": { + "Entities_-_Get_URLs": { + 
"runAfter": {}, + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['azuresentinel']['connectionId']" + } + }, + "method": "post", + "body": "@triggerBody()?['object']?['properties']?['relatedEntities']", + "path": "/entities/url" + } + }, + "For_each_URL": { + "foreach": "@body('Entities_-_Get_URLs')?['URLs']", + "actions": { + "URL_Analyzer": { + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['sentinelmcp']['connectionId']" + } + }, + "method": "post", + "body": { + "workspaceId": "@triggerBody()?['workspaceId']", + "lookBackDays": "[parameters('lookBackDays')]", + "properties": { + "entityType": "Url", + "url": "@{item()?['Url']}" + } + }, + "path": "/aiprimitives/analysis", + "queries": { + "api-version": "2025-08-01-preview" + } + } + }, + "Compose_Url": { + "runAfter": { + "URL_Analyzer": [ + "Succeeded" + ] + }, + "type": "Compose", + "inputs": "@concat(\r\n'🔗 **URL Analysis for: ', item()?['Url'], '**\\n\\n',\r\n'🏷️ **Classification**\\n\\n',\r\nbody('URL_Analyzer')?['classification'], '\\n\\n',\r\n'🔍 **Analysis Result**\\n\\n',\r\nbody('URL_Analyzer')?['analysis'], '\\n\\n',\r\n'✅ **Recommendation**\\n\\n',\r\nbody('URL_Analyzer')?['recommendation'], '\\n\\n',\r\n'⚠️ **Disclaimer**\\n\\n',\r\n'🤖 ', body('URL_Analyzer')?['disclaimer']\r\n)" + }, + "Add_Url_comment_to_incident": { + "runAfter": { + "Compose_Url": [ + "Succeeded" + ] + }, + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['azuresentinel']['connectionId']" + } + }, + "method": "post", + "body": { + "incidentArmId": "@triggerBody()?['object']?['id']", + "message": "
<p>@{outputs('Compose_Url')}</p>
" + }, + "path": "/Incidents/Comment" + } + } + }, + "runAfter": { + "Entities_-_Get_URLs": [ + "Succeeded" + ] + }, + "type": "Foreach" + }, + "Entities_-_Get_Accounts": { + "runAfter": {}, + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['azuresentinel']['connectionId']" + } + }, + "method": "post", + "body": "@triggerBody()?['object']?['properties']?['relatedEntities']", + "path": "/entities/account" + } + }, + "For_each_User": { + "foreach": "@body('Entities_-_Get_Accounts')?['Accounts']", + "actions": { + "User_Analyzer": { + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['sentinelmcp']['connectionId']" + } + }, + "method": "post", + "body": { + "workspaceId": "@triggerBody()?['workspaceId']", + "lookBackDays": "[parameters('lookBackDays')]", + "properties": { + "entityType": "User", + "userId": "@{item()?['AadUserId']}" + } + }, + "path": "/aiprimitives/analysis", + "queries": { + "api-version": "2025-08-01-preview" + } + } + }, + "Compose_User": { + "runAfter": { + "User_Analyzer": [ + "Succeeded" + ] + }, + "type": "Compose", + "inputs": "@concat(\r\n'👤 **User Analysis for: ', item()?['Name'], '**\\n\\n',\r\n'🏷️ **Classification**\\n\\n',\r\nbody('User_Analyzer')?['classification'], '\\n\\n',\r\n'🔍 **Analysis Result**\\n\\n',\r\nbody('User_Analyzer')?['analysis'], '\\n\\n',\r\n'✅ **Recommendation**\\n\\n',\r\nbody('User_Analyzer')?['recommendation'], '\\n\\n',\r\n'⚠️ **Disclaimer**\\n\\n',\r\n'🤖 ', body('User_Analyzer')?['disclaimer']\r\n)" + }, + "Add_User_comment_to_incident": { + "runAfter": { + "Compose_User": [ + "Succeeded" + ] + }, + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['azuresentinel']['connectionId']" + } + }, + "method": "post", + "body": { + "incidentArmId": "@triggerBody()?['object']?['id']", + "message": "
<p>@{outputs('Compose_User')}</p>
" + }, + "path": "/Incidents/Comment" + } + } + }, + "runAfter": { + "Entities_-_Get_Accounts": [ + "Succeeded" + ] + }, + "type": "Foreach" + } + }, + "outputs": {} + }, + "parameters": { + "$connections": { + "value": { + "azuresentinel": { + "connectionId": "[resourceId('Microsoft.Web/connections', variables('MicrosoftSentinelConnectionName'))]", + "connectionName": "[variables('MicrosoftSentinelConnectionName')]", + "id": "[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', resourceGroup().location, '/managedApis/Azuresentinel')]", + "connectionProperties": { + "authentication": { + "type": "ManagedServiceIdentity" + } + } + }, + "sentinelmcp": { + "connectionId": "[resourceId('Microsoft.Web/connections', variables('SentinelMCPConnectionName'))]", + "connectionName": "[variables('SentinelMCPConnectionName')]", + "id": "[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', resourceGroup().location, '/managedApis/sentinelmcp')]" + } + } + } + } + } + }, + { + "type": "Microsoft.Web/connections", + "apiVersion": "2016-06-01", + "name": "[variables('MicrosoftSentinelConnectionName')]", + "location": "[resourceGroup().location]", + "kind": "V1", + "properties": { + "displayName": "[variables('MicrosoftSentinelConnectionName')]", + "customParameterValues": {}, + "parameterValueType": "Alternative", + "api": { + "id": "[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', resourceGroup().location, '/managedApis/Azuresentinel')]" + } + } + }, + { + "type": "Microsoft.Web/connections", + "apiVersion": "2016-06-01", + "name": "[variables('SentinelMCPConnectionName')]", + "location": "[resourceGroup().location]", + "kind": "V1", + "properties": { + "displayName": "[variables('SentinelMCPConnectionName')]", + "customParameterValues": {}, + "api": { + "id": "[concat('/subscriptions/', subscription().subscriptionId, 
'/providers/Microsoft.Web/locations/', resourceGroup().location, '/managedApis/sentinelmcp')]" + } + } + } + ] +} \ No newline at end of file diff --git a/Solutions/SentinelSOARessentials/Playbooks/Incident-Trigger-Entity-Analyzer/images/deployment.png b/Solutions/SentinelSOARessentials/Playbooks/Incident-Trigger-Entity-Analyzer/images/deployment.png new file mode 100644 index 00000000000..24b63213bc3 Binary files /dev/null and b/Solutions/SentinelSOARessentials/Playbooks/Incident-Trigger-Entity-Analyzer/images/deployment.png differ diff --git a/Solutions/SentinelSOARessentials/Playbooks/Incident-Trigger-Entity-Analyzer/images/logicapp_dis.png b/Solutions/SentinelSOARessentials/Playbooks/Incident-Trigger-Entity-Analyzer/images/logicapp_dis.png new file mode 100644 index 00000000000..0634de6e2e6 Binary files /dev/null and b/Solutions/SentinelSOARessentials/Playbooks/Incident-Trigger-Entity-Analyzer/images/logicapp_dis.png differ diff --git a/Solutions/SentinelSOARessentials/Playbooks/Incident-Trigger-Entity-Analyzer/readme.md b/Solutions/SentinelSOARessentials/Playbooks/Incident-Trigger-Entity-Analyzer/readme.md new file mode 100644 index 00000000000..243ae079f33 --- /dev/null +++ b/Solutions/SentinelSOARessentials/Playbooks/Incident-Trigger-Entity-Analyzer/readme.md @@ -0,0 +1,138 @@ +# Multi-Entity Analyzer - Microsoft Sentinel Playbook + +Activating the 'Deploy' button initiates the deployment of an Azure Logic App integrated with Microsoft Sentinel MCP Actions, utilizing a Microsoft Sentinel incident trigger. +The Logic App is configured to run when a new incident is created in Sentinel. This Logic App automatically analyzes all URL and User entities within the incident and provides detailed security insights including classification, analysis results, and recommendations for each entity type. 
+ +![Deployment](./images/deployment.png) + +The playbook automatically triggers when: +- A new incident is created in Microsoft Sentinel +- The incident contains URL entities that need security analysis +- The incident contains User/Account entities that require behavioral analysis +- Security analysts need comprehensive automated analysis of multiple entity types + +After the analysis is complete, the MCP Entity Analyzer conducts a comprehensive investigation of each entity type and automatically adds detailed comments to the incident with: +- **URL Analysis**: Security classification, threat intelligence, and URL reputation analysis +- **User Analysis**: Behavioral analysis, risk assessment, and user activity patterns +- **Classification**: Security classification for each entity +- **Analysis Results**: Detailed security analysis findings for each entity +- **Recommendations**: Security recommendations based on the analysis +- **Disclaimer**: AI-generated analysis disclaimer + +### Prerequisites + +Prior to beginning the installation process, it's crucial to confirm that you have met the following prerequisites: +- The user deploying this Logic App needs to have a **Contributor Role** +- The user has permissions to access **Microsoft Sentinel** workspace +- The **SentinelMCP connector** is available in your environment +- The Logic App will automatically use the workspace ID from the incident trigger + +### Parameters + +During deployment, you'll need to provide: +- **PlaybookName**: Name for the Logic App (default: "Entity-Analyzer-Incident-Trigger") +- **lookBackDays**: Number of days to look back for entity analysis (default: 60 days) + +### Deployment + +To deploy the Multi-Entity Analyzer Logic App: +1. Press on the Deploy button below +2. Select your subscription and resource group (use the same tenant where Microsoft Sentinel is configured) +3. Configure the lookBackDays parameter if needed (default is 60 days) +4. 
The workspace ID will be automatically retrieved from the incident + + + +[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FSentinelSOARessentials%2FPlaybooks%2FIncident-Trigger-Entity-Analyzer%2Fazuredeploy.json) +[![Deploy to Azure Gov](https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/1-CONTRIBUTION-GUIDE/images/deploytoazuregov.png)](https://portal.azure.us/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FSentinelSOARessentials%2FPlaybooks%2FIncident-Trigger-Entity-Analyzer%2Fazuredeploy.json) + + +### Post Deployment + +After successful deployment: +- The Logic App will be automatically enabled and ready to use +- **Authenticate the connections**: Go to the Logic App → API connections and authenticate: + - **Microsoft Sentinel connection**: Authenticate with a user that has Sentinel permissions + - **SentinelMCP connection**: Authenticate with Microsoft Sentinel MCP permissions +- The playbook will automatically trigger when new incidents are created +- **Manual execution**: You can also run this playbook manually from the incident page +- **Automation Rule**: Consider creating an automation rule to run this playbook automatically on specific incident types + +![Logic App Designer](./images/logicapp_dis.png) + +### How It Works + +1. **Trigger**: The Logic App triggers when a new incident is created in Microsoft Sentinel +2. **Entity Extraction**: The playbook extracts all URL and User entities from the incident +3. **Parallel Analysis**: + - **URLs**: Each URL is analyzed for security threats, reputation, and classification + - **Users**: Each user account is analyzed for behavioral patterns and risk assessment +4. **Processing**: The analysis results are formatted with emojis and proper formatting +5. 
**Output**: Separate detailed comments are added to the incident for each analyzed entity: + - One comment per URL entity analyzed + - One comment per User entity analyzed + +### Sample Output + +The playbook generates formatted comments in the incident for each entity type: + +**URL Analysis Output:** +``` +🔗 URL Analysis for: https://example.com + +🏷️ Classification +Malicious/Suspicious/Benign + +🔍 Analysis Result +Detailed security findings and threat intelligence + +✅ Recommendation +Security recommendations and next steps + +⚠️ Disclaimer +🤖 AI-generated analysis notice +``` + +**User Analysis Output:** +``` +👤 User Analysis for: john.doe@company.com + +🏷️ Classification +High Risk/Medium Risk/Low Risk + +🔍 Analysis Result +Behavioral analysis and activity patterns + +✅ Recommendation +User security recommendations + +⚠️ Disclaimer +🤖 AI-generated analysis notice +``` + +### Use Cases + +This playbook is ideal for: +- **Automated Incident Enrichment**: Automatically analyze all entities when incidents are created +- **Phishing Investigations**: Analyze suspicious URLs and affected users simultaneously +- **Insider Threat Detection**: Comprehensive user behavior analysis +- **Malware Investigations**: URL reputation analysis combined with user impact assessment +- **Security Operations Center (SOC)**: Reduce manual analysis time and improve response speed + +### Automation Integration + +- **Manual Execution**: Run from the incident page when needed +- **Automation Rules**: Create rules to trigger automatically based on: + - Incident severity levels + - Specific incident types (phishing, malware, etc.) 
+ - Entity count thresholds + - Specific alert sources + +### Troubleshooting + +- Ensure both API connections are properly authenticated +- Verify the SentinelMCP connector is available in your region +- Check that Microsoft Sentinel MCP is enabled and accessible +- Review the Logic App run history for any failed executions +- Verify the incident contains valid URL or User entities +- Confirm the lookBackDays parameter matches your data retention policy \ No newline at end of file diff --git a/Solutions/SentinelSOARessentials/Playbooks/Url-Trigger-Entity-Analyzer/azuredeploy.json b/Solutions/SentinelSOARessentials/Playbooks/Url-Trigger-Entity-Analyzer/azuredeploy.json new file mode 100644 index 00000000000..4c05d36682c --- /dev/null +++ b/Solutions/SentinelSOARessentials/Playbooks/Url-Trigger-Entity-Analyzer/azuredeploy.json @@ -0,0 +1,211 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "title": "URL Entity Trigger Analyzer", + "description": "This playbook is triggered manually when a URL entity is selected in a Microsoft Sentinel incident and provides detailed security insights including classification, analysis results, and recommendations.", + "prerequisites": [ + "1. The user deploying this Logic App needs to have a Contributor Role.", + "2. The user has permissions to access Microsoft Sentinel workspace.", + "3. You have the Workspace ID for your Sentinel environment.", + "4. The SentinelMCP connector is available in your environment.", + "5. Access to Microsoft Sentinel portal in Azure (not Defender portal)." + ], + "postDeployment": [ + "1. Authenticate the connections: Go to the Logic App → API connections and authenticate Microsoft Sentinel connection with a user that has Sentinel permissions.", + "2. Authenticate the SentinelMCP connection with Microsoft Sentinel MCP permissions.", + "3. 
The playbook will be available to run manually from incident entities.", + "4. Results will be automatically added as comments to the relevant incidents." + ], + "prerequisitesDeployTemplateFile": "", + "lastUpdateTime": "2025-12-07T00:00:00.000Z", + "entities": [ + "URL" + ], + "tags": [ + "Enrichment", + "Utilities", + "Entity Analysis" + ], + "support": { + "tier": "community" + }, + "author": { + "name": "yaniv shasha" + } + }, + "parameters": { + "PlaybookName": { + "defaultValue": "Entity-analyzer-Url-Trigger", + "type": "string" + }, + "lookBackDays": { + "defaultValue": 10, + "type": "int", + "metadata": { + "description": "Number of days to look back for entity analysis" + } + }, + "workspaceId": { + "type": "string", + "metadata": { + "description": "The workspace ID for Microsoft Sentinel" + } + } + }, + "variables": { + "MicrosoftSentinelConnectionName": "[concat('MicrosoftSentinel-', parameters('PlaybookName'))]", + "SentinelMCPConnectionName": "[concat('SentinelMCP-', parameters('PlaybookName'))]" + }, + "resources": [ + { + "type": "Microsoft.Logic/workflows", + "apiVersion": "2017-07-01", + "name": "[parameters('PlaybookName')]", + "location": "[resourceGroup().location]", + "identity": { + "type": "SystemAssigned" + }, + "tags": { + "Created By": "ARM Template" + }, + "dependsOn": [ + "[resourceId('Microsoft.Web/connections', variables('MicrosoftSentinelConnectionName'))]", + "[resourceId('Microsoft.Web/connections', variables('SentinelMCPConnectionName'))]" + ], + "properties": { + "state": "Enabled", + "definition": { + "$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "$connections": { + "defaultValue": {}, + "type": "Object" + } + }, + "triggers": { + "Microsoft_Sentinel_entity": { + "type": "ApiConnectionWebhook", + "inputs": { + "host": { + "connection": { + "name": 
"@parameters('$connections')['azuresentinel']['connectionId']" + } + }, + "body": { + "callback_url": "@listCallbackUrl()" + }, + "path": "/entity/@{encodeURIComponent('UrlEntity')}" + } + } + }, + "actions": { + "URL_Analyzer": { + "runAfter": {}, + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['sentinelmcp']['connectionId']" + } + }, + "method": "post", + "body": { + "workspaceId": "[parameters('workspaceId')]", + "lookBackDays": "[parameters('lookBackDays')]", + "properties": { + "entityType": "Url", + "url": "@{triggerBody()?['Entity']?['properties']?['Url']}" + } + }, + "path": "/aiprimitives/analysis" + } + }, + "Compose": { + "runAfter": { + "URL_Analyzer": [ + "Succeeded" + ] + }, + "type": "Compose", + "inputs": "@concat(\r\n'| 🔍 **Section** | Details |\\n',\r\n'|---|---|\\n',\r\n'| 🏷️ **Classification** | ', replace(replace(replace(coalesce(body('URL_Analyzer')?['classification'], 'N/A'), '\\r\\n', '\\n'), '\\n', '<br>
'), '|', '\\\\|'), ' |\\n',\r\n'| 🕵️ **Entity Type** | ', replace(replace(replace(coalesce(body('URL_Analyzer')?['properties']?['entityType'], 'N/A'), '\\r\\n', '\\n'), '\\n', '<br>
'), '|', '\\\\|'), ' |\\n',\r\n'| 🔎 **Analysis Result** | ', replace(replace(replace(replace(coalesce(body('URL_Analyzer')?['analysis'], 'N/A'), '- ', '• '), '\\r\\n', '\\n'), '\\n', '<br>
'), '|', '\\\\|'), ' |\\n',\r\n'| ✅ **Recommendation** | ', replace(replace(replace(coalesce(body('URL_Analyzer')?['recommendation'], 'N/A'), '\\r\\n', '\\n'), '\\n', '<br>
'), '|', '\\\\|'), ' |\\n',\r\n'| ⚠️ **Disclaimer** | 🤖 ', replace(replace(replace(coalesce(body('URL_Analyzer')?['disclaimer'], 'N/A'), '\\r\\n', '\\n'), '\\n', '<br>
'), '|', '\\\\|'), ' |\\n',\r\n'| 📂 **Data Sources** | ', if(equals(empty(body('URL_Analyzer')?['dataSourceList']), true), 'N/A', concat('• ', replace(join(body('URL_Analyzer')?['dataSourceList'], '\\n• '), '\\n', '<br>
'))), ' |'\r\n)" + }, + "Add_comment_to_incident_(V3)": { + "runAfter": { + "Compose": [ + "Succeeded" + ] + }, + "type": "ApiConnection", + "inputs": { + "host": { + "connection": { + "name": "@parameters('$connections')['azuresentinel']['connectionId']" + } + }, + "method": "post", + "body": { + "incidentArmId": "@triggerBody()?['IncidentArmID']", + "message": "
<p>
@{outputs('Compose')}
</p>
" + }, + "path": "/Incidents/Comment" + } + } + }, + "outputs": {} + }, + "parameters": { + "$connections": { + "value": { + "azuresentinel": { + "connectionId": "[resourceId('Microsoft.Web/connections', variables('MicrosoftSentinelConnectionName'))]", + "connectionName": "[variables('MicrosoftSentinelConnectionName')]", + "id": "[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', resourceGroup().location, '/managedApis/Azuresentinel')]", + "connectionProperties": { + "authentication": { + "type": "ManagedServiceIdentity" + } + } + }, + "sentinelmcp": { + "connectionId": "[resourceId('Microsoft.Web/connections', variables('SentinelMCPConnectionName'))]", + "connectionName": "[variables('SentinelMCPConnectionName')]", + "id": "[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', resourceGroup().location, '/managedApis/sentinelmcp')]" + } + } + } + } + } + }, + { + "type": "Microsoft.Web/connections", + "apiVersion": "2016-06-01", + "name": "[variables('MicrosoftSentinelConnectionName')]", + "location": "[resourceGroup().location]", + "kind": "V1", + "properties": { + "displayName": "[variables('MicrosoftSentinelConnectionName')]", + "customParameterValues": {}, + "parameterValueType": "Alternative", + "api": { + "id": "[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', resourceGroup().location, '/managedApis/Azuresentinel')]" + } + } + }, + { + "type": "Microsoft.Web/connections", + "apiVersion": "2016-06-01", + "name": "[variables('SentinelMCPConnectionName')]", + "location": "[resourceGroup().location]", + "kind": "V1", + "properties": { + "displayName": "[variables('SentinelMCPConnectionName')]", + "customParameterValues": {}, + "api": { + "id": "[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Web/locations/', resourceGroup().location, '/managedApis/sentinelmcp')]" + } + } + } + ] +} \ No newline at 
end of file diff --git a/Solutions/SentinelSOARessentials/Playbooks/Url-Trigger-Entity-Analyzer/images/deployment.png b/Solutions/SentinelSOARessentials/Playbooks/Url-Trigger-Entity-Analyzer/images/deployment.png new file mode 100644 index 00000000000..0afb98549b7 Binary files /dev/null and b/Solutions/SentinelSOARessentials/Playbooks/Url-Trigger-Entity-Analyzer/images/deployment.png differ diff --git a/Solutions/SentinelSOARessentials/Playbooks/Url-Trigger-Entity-Analyzer/images/logicapp_dis.png b/Solutions/SentinelSOARessentials/Playbooks/Url-Trigger-Entity-Analyzer/images/logicapp_dis.png new file mode 100644 index 00000000000..29104de55e6 Binary files /dev/null and b/Solutions/SentinelSOARessentials/Playbooks/Url-Trigger-Entity-Analyzer/images/logicapp_dis.png differ diff --git a/Solutions/SentinelSOARessentials/Playbooks/Url-Trigger-Entity-Analyzer/images/trigger.png b/Solutions/SentinelSOARessentials/Playbooks/Url-Trigger-Entity-Analyzer/images/trigger.png new file mode 100644 index 00000000000..63217193b5a Binary files /dev/null and b/Solutions/SentinelSOARessentials/Playbooks/Url-Trigger-Entity-Analyzer/images/trigger.png differ diff --git a/Solutions/SentinelSOARessentials/Playbooks/Url-Trigger-Entity-Analyzer/readme.md b/Solutions/SentinelSOARessentials/Playbooks/Url-Trigger-Entity-Analyzer/readme.md new file mode 100644 index 00000000000..7fa5fd659d3 --- /dev/null +++ b/Solutions/SentinelSOARessentials/Playbooks/Url-Trigger-Entity-Analyzer/readme.md @@ -0,0 +1,122 @@ +# URL Entity Analyzer - Microsoft Sentinel Playbook + +Activating the 'Deploy' button initiates the deployment of an Azure Logic App integrated with Microsoft Sentinel MCP Actions, utilizing a Microsoft Sentinel entity trigger. +The Logic App is configured to run manually when a URL entity is selected in a Sentinel incident. This Logic App analyzes suspicious URLs and provides detailed security insights including classification, analysis results, and recommendations. 
+ +![Deployment](./images/deployment.png) + +**Important Note:** As of now, this playbook only works when triggered from the **Microsoft Sentinel portal in Azure**. It is not currently supported in the Defender portal. + +The playbook can be manually triggered when: +- A URL entity is identified in a Microsoft Sentinel incident +- Security analysts need detailed analysis of suspicious URLs +- Automated threat intelligence is required for URL-based investigations + +After the analysis is complete, the MCP Entity Analyzer conducts a comprehensive investigation of the URL entity and automatically adds a detailed comment to the incident with: +- **Classification**: Security classification of the URL +- **Entity Type**: Confirmation of the URL entity type +- **Analysis Result**: Detailed security analysis findings +- **Recommendation**: Security recommendations based on the analysis +- **Disclaimer**: AI-generated analysis disclaimer +- **Data Sources**: List of data sources used in the analysis + +### Prerequisites + +Prior to beginning the installation process, it's crucial to confirm that you have met the following prerequisites: +- The user deploying this Logic App needs to have a **Contributor Role** +- The user has permissions to access **Microsoft Sentinel** workspace +- You have the **Workspace ID** for your Sentinel environment +- The **SentinelMCP connector** is available in your environment +- Access to **Microsoft Sentinel portal in Azure** (not Defender portal) + +### Parameters + +During deployment, you'll need to provide: +- **PlaybookName**: Name for the Logic App (default: "Entity-analyzer-Url-Trigger") +- **lookBackDays**: Number of days to look back for entity analysis (default: 10 days) +- **workspaceId**: Your Microsoft Sentinel workspace ID (required) + +### Deployment + +To deploy the URL Entity Analyzer Logic App: +1. Press on the Deploy button below +2. 
Select your subscription and resource group (use the same tenant where Microsoft Sentinel is configured) +3. Provide the required Workspace ID parameter +4. Configure the lookBackDays parameter if needed (default is 10 days) + + + +[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FSentinelSOARessentials%2FPlaybooks%2FUrl-Trigger-Entity-Analyzer%2Fazuredeploy.json) +[![Deploy to Azure Gov](https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/1-CONTRIBUTION-GUIDE/images/deploytoazuregov.png)](https://portal.azure.us/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FSentinelSOARessentials%2FPlaybooks%2FUrl-Trigger-Entity-Analyzer%2Fazuredeploy.json) + +### Post Deployment + +After successful deployment: +- The Logic App will be automatically enabled and ready to use +- **Authenticate the connections**: Go to the Logic App → API connections and authenticate: + - **Microsoft Sentinel connection**: Authenticate with a user that has Sentinel permissions + - **SentinelMCP connection**: Authenticate with Microsoft Sentinel MCP permissions +- The playbook will be available to run manually from incident entities +- Results will be automatically added as comments to the relevant incidents + +![Logic App Designer](./images/logicapp_dis.png) + +### How to Run the Playbook + +To manually trigger the URL Entity Analyzer: + +1. Navigate to **Microsoft Sentinel** in the Azure portal +2. Go to **Incidents** and open an incident containing URL entities +3. Click on the **Entities** tab +4. Select a **URL entity** from the list +5. Click on **Run playbook** button in the top right +6. Select **Entity-analyzer-Url-Trigger** from the playbook list +7. 
The analysis will run and results will be added as a comment to the incident + +![Run Playbook](./images/trigger.png) + +### How It Works + +1. **Manual Trigger**: The Logic App is manually triggered when a security analyst selects a URL entity in a Sentinel incident and runs the playbook +2. **Analysis**: The URL is sent to Microsoft Sentinel's MCP Entity Analyzer for comprehensive analysis using the SentinelMCP connector +3. **Processing**: The analysis results are formatted into a readable table format with emojis and proper formatting +4. **Output**: A detailed comment is automatically added to the incident containing: + - Security classification of the URL + - Detailed analysis results + - Security recommendations + - Data sources used + - AI-generated disclaimer + +### Sample Output + +The playbook generates a formatted table comment in the incident with sections like: + +| 🔍 **Section** | Details | +|---|---| +| 🏷️ **Classification** | Malicious/Suspicious/Benign | +| 🕵️ **Entity Type** | Url | +| 🔎 **Analysis Result** | Detailed security findings | +| ✅ **Recommendation** | Security recommendations | +| ⚠️ **Disclaimer** | 🤖 AI-generated analysis notice | +| 📂 **Data Sources** | List of threat intelligence sources | + +### Use Cases + +This playbook is ideal for: +- **Phishing Investigations**: Analyze suspicious URLs from phishing emails +- **Malware Analysis**: Investigate URLs associated with malware campaigns +- **Threat Intelligence**: Enrich incidents with automated URL reputation analysis +- **Security Operations Center (SOC)**: Reduce manual analysis time for URL-based threats +- **Incident Response**: Quick assessment of URL entities during active investigations +- **On-Demand Analysis**: Run analysis only when needed for specific URL entities + +### Troubleshooting + +- Ensure both API connections are properly authenticated +- Verify the Workspace ID is correct for your Sentinel environment +- Check that Microsoft Sentinel MCP is enabled and 
accessible +- Confirm the SentinelMCP connector is available in your region +- Review the Logic App run history for any failed executions +- Verify the URL entity is properly formatted in the incident +- **Important**: Make sure you're running the playbook from the **Microsoft Sentinel portal in Azure**, not the Defender portal +- Ensure the incident contains a valid URL entity before attempting to run the playbook \ No newline at end of file diff --git a/Solutions/SentinelSOARessentials/ReleaseNotes.md b/Solutions/SentinelSOARessentials/ReleaseNotes.md index 6beac2ce5ce..5eebd91dfed 100644 --- a/Solutions/SentinelSOARessentials/ReleaseNotes.md +++ b/Solutions/SentinelSOARessentials/ReleaseNotes.md @@ -1,5 +1,6 @@ | **Version** | **Date Modified (DD-MM-YYY)** | **Change History** | |-------------|--------------------------------|--------------------------------------------------------------------------------------------| +| 3.0.4 | 17-11-2025 | Added new **playbooks** for the SentinelSOARessentials solution. | | 3.0.3 | 30-05-2025 | This upgrade focused on improving **Playbook** functionality, updating documentation, and refining deployment parameters. | | 3.0.2 | 26-10-2023 | Changes for rebranding from Microsoft 365 Defender to Microsoft Defender XDR. | | 3.0.1 | 11-08-2023 | Updated timeContextFromParameter with TimeRange in the **Workbook** template.
| diff --git a/Solutions/VaronisSaaS/Data Connectors/Varonis.Sentinel.Functions.zip b/Solutions/VaronisSaaS/Data Connectors/Varonis.Sentinel.Functions.zip index e64878f19f5..904d7c0200f 100644 Binary files a/Solutions/VaronisSaaS/Data Connectors/Varonis.Sentinel.Functions.zip and b/Solutions/VaronisSaaS/Data Connectors/Varonis.Sentinel.Functions.zip differ diff --git a/Solutions/VaronisSaaS/Data Connectors/VaronisSaaSFunction/Varonis.Sentinel.Functions/Helpers/AlertExtensions.cs b/Solutions/VaronisSaaS/Data Connectors/VaronisSaaSFunction/Varonis.Sentinel.Functions/Helpers/AlertExtensions.cs index 526e07556bf..f4d5ca16281 100644 --- a/Solutions/VaronisSaaS/Data Connectors/VaronisSaaSFunction/Varonis.Sentinel.Functions/Helpers/AlertExtensions.cs +++ b/Solutions/VaronisSaaS/Data Connectors/VaronisSaaSFunction/Varonis.Sentinel.Functions/Helpers/AlertExtensions.cs @@ -20,7 +20,8 @@ public static class AlertExtensions { ["High"] = 0, ["Medium"] = 1, - ["Low"] = 2 + ["Low"] = 2, + ["Informational"] = 3 }; } } diff --git a/Solutions/VaronisSaaS/Data Connectors/VaronisSaaSFunction/Varonis.Sentinel.Functions/local.settings.json b/Solutions/VaronisSaaS/Data Connectors/VaronisSaaSFunction/Varonis.Sentinel.Functions/local.settings.json index 18ee795631c..43f4be8dd0f 100644 --- a/Solutions/VaronisSaaS/Data Connectors/VaronisSaaSFunction/Varonis.Sentinel.Functions/local.settings.json +++ b/Solutions/VaronisSaaS/Data Connectors/VaronisSaaSFunction/Varonis.Sentinel.Functions/local.settings.json @@ -9,7 +9,7 @@ "LogAnalyticsKey": "", "LogAnalyticsWorkspace": "", "AlertRetrievalStart": "2 weeks", - "AlertSeverity": "Low, Medium, High", + "AlertSeverity": "Low, Medium, High, Informational", "ThreatDetectionPolicies": "", "AlertStatus": "New, Under Investigation", "MaxAlertRetrieval": "1000" diff --git a/Solutions/VaronisSaaS/Data Connectors/VaronisSaaS_API_FunctionApp.json b/Solutions/VaronisSaaS/Data Connectors/VaronisSaaS_API_FunctionApp.json index 93067261cef..64b112a68e2 100644 
--- a/Solutions/VaronisSaaS/Data Connectors/VaronisSaaS_API_FunctionApp.json +++ b/Solutions/VaronisSaaS/Data Connectors/VaronisSaaS_API_FunctionApp.json @@ -23,6 +23,11 @@ "baseQuery": "VaronisAlerts_CL\n| where Severity_s == \"Low\"", "legend": "Low severity alerts", "metricName": "Low severity alerts" + }, + { + "baseQuery": "VaronisAlerts_CL\n| where Severity_s == \"Informational\"", + "legend": "Informational severity alerts", + "metricName": "Informational severity alerts" } ], "sampleQueries": [ diff --git a/Solutions/VaronisSaaS/Data Connectors/azuredeploy.bicep b/Solutions/VaronisSaaS/Data Connectors/azuredeploy.bicep index d3aa25b5b58..4d051c7f68f 100644 --- a/Solutions/VaronisSaaS/Data Connectors/azuredeploy.bicep +++ b/Solutions/VaronisSaaS/Data Connectors/azuredeploy.bicep @@ -31,7 +31,7 @@ param threatDetectionPolicies string = '' param alertStatus string = 'New, Under Investigation' @description('Specify the alert severity.') -param alertSeverity string = 'Low, Medium, High' +param alertSeverity string = 'Low, Medium, High, Informational' var functionAppName = 'VaronisSaaS-${uniqueString(resourceGroup().id)}' var functionWorkerRuntime = 'dotnet' diff --git a/Solutions/VaronisSaaS/Data Connectors/azuredeploy.json b/Solutions/VaronisSaaS/Data Connectors/azuredeploy.json index e8c083c6b9f..c54feb879a7 100644 --- a/Solutions/VaronisSaaS/Data Connectors/azuredeploy.json +++ b/Solutions/VaronisSaaS/Data Connectors/azuredeploy.json @@ -71,7 +71,7 @@ }, "alertSeverity": { "type": "string", - "defaultValue": "Low, Medium, High", + "defaultValue": "Low, Medium, High, Informational", "metadata": { "description": "Specify the alert severity." 
} diff --git a/Solutions/VaronisSaaS/Data/Solution_VaronisSaaS.json b/Solutions/VaronisSaaS/Data/Solution_VaronisSaaS.json index 148dce7f4c6..586df36f98b 100644 --- a/Solutions/VaronisSaaS/Data/Solution_VaronisSaaS.json +++ b/Solutions/VaronisSaaS/Data/Solution_VaronisSaaS.json @@ -6,7 +6,7 @@ "Workbooks": ["Workbooks/VaronisSaaS.json"], "Data Connectors": ["Data Connectors/VaronisSaaS_API_FunctionApp.json"], "BasePath": "C:\\Projects\\DataIntegration\\Azure-Sentinel\\Solutions\\VaronisSaaS", - "Version": "3.0.2", + "Version": "3.0.3", "Metadata": "SolutionMetadata.json", "TemplateSpec": true, "Is1PConnector": false diff --git a/Solutions/VaronisSaaS/Package/3.0.3.zip b/Solutions/VaronisSaaS/Package/3.0.3.zip new file mode 100644 index 00000000000..c06be68eab8 Binary files /dev/null and b/Solutions/VaronisSaaS/Package/3.0.3.zip differ diff --git a/Solutions/VaronisSaaS/Package/mainTemplate.json b/Solutions/VaronisSaaS/Package/mainTemplate.json index 3460bdb659c..31c5eca4e34 100644 --- a/Solutions/VaronisSaaS/Package/mainTemplate.json +++ b/Solutions/VaronisSaaS/Package/mainTemplate.json @@ -39,7 +39,7 @@ }, "variables": { "_solutionName": "VaronisSaaS", - "_solutionVersion": "3.0.2", + "_solutionVersion": "3.0.3", "solutionId": "varonis.microsoft-sentinel-solution-varonissaas", "_solutionId": "[variables('solutionId')]", "workbookVersion1": "1.0.0", @@ -70,7 +70,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "VaronisSaaS Workbook with template version 3.0.2", + "description": "VaronisSaaS Workbook with template version 3.0.3", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('workbookVersion1')]", @@ -157,7 +157,7 @@ "[extensionResourceId(resourceId('Microsoft.OperationalInsights/workspaces', 
parameters('workspace')), 'Microsoft.SecurityInsights/contentPackages', variables('_solutionId'))]" ], "properties": { - "description": "VaronisSaaS data connector with template version 3.0.2", + "description": "VaronisSaaS data connector with template version 3.0.3", "mainTemplate": { "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", "contentVersion": "[variables('dataConnectorVersion1')]", @@ -196,6 +196,11 @@ "baseQuery": "VaronisAlerts_CL\n| where Severity_s == \"Low\"", "legend": "Low severity alerts", "metricName": "Low severity alerts" + }, + { + "baseQuery": "VaronisAlerts_CL\n| where Severity_s == \"Informational\"", + "legend": "Informational severity alerts", + "metricName": "Informational severity alerts" } ], "sampleQueries": [ @@ -387,6 +392,11 @@ "baseQuery": "VaronisAlerts_CL\n| where Severity_s == \"Low\"", "legend": "Low severity alerts", "metricName": "Low severity alerts" + }, + { + "baseQuery": "VaronisAlerts_CL\n| where Severity_s == \"Informational\"", + "legend": "Informational severity alerts", + "metricName": "Informational severity alerts" } ], "dataTypes": [ @@ -484,7 +494,7 @@ "apiVersion": "2023-04-01-preview", "location": "[parameters('workspace-location')]", "properties": { - "version": "3.0.2", + "version": "3.0.3", "kind": "Solution", "contentSchemaVersion": "3.0.0", "displayName": "VaronisSaaS", diff --git a/Solutions/VaronisSaaS/ReleaseNotes.md b/Solutions/VaronisSaaS/ReleaseNotes.md index a12f2404657..8fdbef4d60e 100644 --- a/Solutions/VaronisSaaS/ReleaseNotes.md +++ b/Solutions/VaronisSaaS/ReleaseNotes.md @@ -1,5 +1,6 @@ | **Version** | **Date Modified (DD-MM-YYYY)** | **Change History** | |-------------|--------------------------------|---------------------------------------------| +| 3.0.3 | 25-11-2025 | Add Informational severity level support | | 3.0.2 | 12-09-2025 | Save last alert ingest time | | 3.0.1 | 02-12-2025 | Bug fixes | | 3.0.0 | 02-07-2024 | Refactor azure function | \ 
No newline at end of file diff --git a/Tools/Solutions Analyzer/README.md b/Tools/Solutions Analyzer/README.md index 8a2440dc38c..473e382e2db 100644 --- a/Tools/Solutions Analyzer/README.md +++ b/Tools/Solutions Analyzer/README.md @@ -1,29 +1,27 @@ # Azure Sentinel Solutions Analyzer -This tool analyzes Azure Sentinel Solutions to extract and map data connector definitions to their ingestion tables, producing comprehensive CSV reports for solution metadata analysis. +This directory contains two complementary tools for analyzing Microsoft Sentinel Solutions: + +1. **`solution_connector_tables.py`** - Extracts and maps data connector definitions to their ingestion tables, producing CSV reports with solution metadata +2. **`generate_connector_docs.py`** - Generates browsable markdown documentation from the CSV data with AI-rendered setup instructions ## Quick Start -**Pre-generated CSV files and documentation are already available in this directory:** -- `solutions_connectors_tables_mapping.csv` - Main mapping of connectors to tables with full metadata -- `solutions_connectors_tables_issues_and_exceptions_report.csv` - Issues and exceptions report -- [`connector-docs/`](connector-docs/) - [Microsoft Sentinel Data Connector Reference](connector-docs/README.md) with browsable indexes by solutions, connectors, and tables +**Pre-generated files are already available in this directory:** +- [`solutions_connectors_tables_mapping.csv`](solutions_connectors_tables_mapping.csv) - Main mapping of connectors to tables with full metadata +- [`solutions_connectors_tables_issues_and_exceptions_report.csv`](solutions_connectors_tables_issues_and_exceptions_report.csv) - Issues and exceptions report -You can use these files directly without running the script. They are kept up-to-date with the Solutions directory. +You can use these files directly without running the scripts. They are kept up-to-date with the Solutions directory. 
-To regenerate the files with the latest data: -```bash -python solution_connector_tables.py -``` +--- -To regenerate the markdown documentation: -```bash -python generate_connector_docs.py -``` +# 1. Solution Connector Tables Analyzer + +**Script:** `solution_connector_tables.py` ## Overview -The analyzer scans the Solutions directory to: +Scans the Solutions directory to: - Extract table references from connector JSON files (queries, sample queries, data types) - Resolve parser function references to actual tables - Flatten solution metadata from SolutionMetadata.json files @@ -33,9 +31,8 @@ The analyzer scans the Solutions directory to: **Note:** Solutions without data connectors are included in the CSV output with empty `connector_id`, `connector_title`, `connector_description`, `connector_publisher`, `connector_files`, and `Table` fields. This ensures complete solution coverage in the documentation while clearly indicating which solutions do not include data ingestion components. -## Installation and Requirements +## Prerequisites -### Prerequisites - Python 3.7 or higher - No external dependencies required (optional: json5 for enhanced JSON parsing) @@ -44,7 +41,8 @@ The analyzer scans the Solutions directory to: pip install json5 # For improved JSON parsing with comments and trailing commas ``` -### Running the Script +## Running the Script + From the `Tools/Solutions Analyzer` directory: ```bash python solution_connector_tables.py @@ -78,23 +76,11 @@ python solution_connector_tables.py --output custom_output.csv --report custom_r ## Output Files -### 1. 
Microsoft Sentinel Data Connector Reference (connector-docs/) - -Browsable markdown documentation generated from the CSV data, providing: - -- **[Solutions Index](connector-docs/solutions-index.md)** - All solutions organized alphabetically (with and without connectors) -- **[Connectors Index](connector-docs/connectors-index.md)** - All unique connectors with metadata -- **[Tables Index](connector-docs/tables-index.md)** - All unique tables with solution references -- **Individual Solution Pages** - Detailed pages for each solution with connector and table information (in `solutions/` directory) -- **Individual Connector Pages** - Detailed pages for each connector with usage information (in `connectors/` directory) - -See the [connector-docs README](connector-docs/README.md) for full documentation. - -### 2. solutions_connectors_tables_mapping.csv (Primary Output) +### 1. solutions_connectors_tables_mapping.csv (Primary Output) The main CSV file containing one row per unique combination of solution, connector, and table. -**Note:** Newlines in the `connector_description` field are replaced with `<br/>
` tags to ensure proper rendering in GitHub's CSV viewer while preserving formatting information. +**Note:** Newlines in the `connector_description` and `connector_permissions` fields are replaced with `<br/>
` tags to ensure proper rendering in GitHub's CSV viewer. The `connector_instruction_steps` field uses standard JSON encoding with `\n` for newlines as it contains JSON-formatted data. + #### Column Descriptions @@ -117,6 +103,8 @@ The main CSV file containing one row per unique combination of solution, connect | `connector_publisher` | Connector publisher name. Empty for solutions without data connectors. | | `connector_title` | Connector display title. Empty for solutions without data connectors. | | `connector_description` | Connector description (newlines replaced with `<br/>
` for GitHub CSV rendering). Empty for solutions without data connectors. | +| `connector_instruction_steps` | Setup and configuration instructions from connector UI definitions, stored as JSON-encoded string. Rendered in documentation using Microsoft Sentinel UI definitions. Empty for solutions without data connectors. | +| `connector_permissions` | Required permissions and prerequisites from connector UI definitions, stored as JSON-encoded string. Rendered in documentation according to Microsoft Sentinel permissions schema (resourceProvider, customs, licenses, tenant). Empty for solutions without data connectors. | | `connector_files` | Semicolon-separated list of GitHub URLs to connector definition files. Empty for solutions without data connectors. | | `is_unique` | `true` if table appears in only one connector file, `false` otherwise | | `table_detection_methods` | (Optional, with --show-detection-methods) Semicolon-separated list of methods used to detect this table | @@ -134,7 +122,7 @@ https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/{solution_name} https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/{solution_name}/Data Connectors/{file_path} ``` -### 3. solutions_connectors_tables_issues_and_exceptions_report.csv (Issues Report) +### 2. solutions_connectors_tables_issues_and_exceptions_report.csv (Issues Report) Contains exceptions and issues encountered during analysis. @@ -163,8 +151,6 @@ Contains exceptions and issues encountered during analysis. | `missing_connector_json` | Data Connectors folder exists but contains no valid JSON | Solution has no connector entries | | `missing_solution_metadata` | Solution has connectors but no SolutionMetadata.json | Solution appears with empty metadata fields | -**Note:** `parser_tables_resolved` entries are automatically filtered from the report as they represent successful parser-to-table resolution. 
- ## Detection Logic ### Table Detection Methods @@ -257,28 +243,175 @@ python solution_connector_tables.py --show-detection-methods This will include the `table_detection_methods` column showing exactly how each table was detected. -## Updating the Script +--- -The script is located at: -``` -Tools/Solutions Analyzer/solution_connector_tables.py -``` +# 2. Connector Documentation Generator + +**Script:** `generate_connector_docs.py` + +## Overview + +Generates browsable markdown documentation from the CSV data produced by `solution_connector_tables.py`. The documentation includes: + +- Three index pages (solutions, connectors, tables) +- Individual pages for each solution with connector details +- Individual pages for each connector with usage information +- **AI-rendered setup instructions** extracted from connector UI definitions + +## Output + +The script generates the **Microsoft Sentinel Data Connector Reference** documentation in the `connector-docs/` directory: + +- **[Solutions Index](connector-docs/solutions-index.md)** - All solutions organized alphabetically (with and without connectors) +- **[Connectors Index](connector-docs/connectors-index.md)** - All unique connectors with metadata +- **[Tables Index](connector-docs/tables-index.md)** - All unique tables with solution references +- **Individual Solution Pages** - Detailed pages for each solution with connector and table information (in [`solutions/`](connector-docs/solutions/) directory) +- **Individual Connector Pages** - Detailed pages for each connector with usage information (in [`connectors/`](connector-docs/connectors/) directory) + +See the [connector-docs README](connector-docs/README.md) for full documentation. 
+ +## Prerequisites + +- Python 3.7 or higher +- Pre-generated CSV file from `solution_connector_tables.py` +- No external dependencies required + +## Running the Script + +From the `Tools/Solutions Analyzer` directory: -After modifications, test with: ```bash -cd "Tools/Solutions Analyzer" -python solution_connector_tables.py +python generate_connector_docs.py +``` + +The script reads `solutions_connectors_tables_mapping.csv` and generates all documentation in the `connector-docs/` directory. + +## Output Structure + +The generated documentation is organized as: + +``` +connector-docs/ +├── README.md # Documentation guide +├── solutions-index.md # Alphabetical list of all solutions +├── connectors-index.md # Alphabetical list of all connectors +├── tables-index.md # Alphabetical list of all tables +├── solutions/ # Individual solution pages (477 files) +│ ├── 1password.md +│ ├── aws-cloudfront.md +│ └── ... +└── connectors/ # Individual connector pages (503 files) + ├── 1passwordeventreporter.md + ├── awscloudfront.md + └── ... 
``` -## Contributing +### Generated Content + +**Solution Pages** include: +- Solution metadata (publisher, support, categories) +- List of connectors in the solution +- Setup instructions for each connector (AI-rendered) +- Required permissions and prerequisites +- Tables ingested by each connector +- Links to connector definition files + +**Connector Pages** include: +- Connector description and metadata +- **AI-rendered setup instructions and permissions** from connector UI definitions with step-by-step guidance +- Required permissions and prerequisites (rendered from Microsoft Sentinel permissions schema) +- List of solutions using this connector +- Tables ingested by the connector +- Links to GitHub connector definition files + +**Index Pages** provide: +- Alphabetical navigation +- Quick statistics +- Cross-references between solutions, connectors, and tables + +## AI-Rendered Setup Instructions and Permissions + +The "Setup Instructions" and "Permissions" sections in the generated connector documentation are **automatically rendered from connector UI definition files**. These sections interpret the UI-centric JSON structures that define the Azure Portal configuration interface and convert them into readable documentation. + +### ⚠️ Important Disclaimer + +**These AI-rendered instructions and permissions may not be fully accurate.** They are generated by interpreting UI definition metadata and should always be verified against the actual Microsoft Sentinel portal before implementation. The content provides a helpful starting point but is not a substitute for official documentation or hands-on portal verification. -When adding new detection methods or modifying the logic: -1. Update the `table_detection_methods` tracking in `record_table()` function -2. Test with `--show-detection-methods` flag to verify detection sources -3. Update this README with new detection methods or column descriptions -4. 
Validate output doesn't introduce false positives (field names detected as tables) +### How It Works + +The rendering process involves several steps: + +1. **JSON Parsing**: The script extracts `instructionSteps` and `permissions` objects from connector definition files in the Solutions directory +2. **UI Type Detection**: Each instruction step has a `type` property (e.g., `DataConnectorsGrid`, `ContextPane`, `GCPGrid`) that determines how it should be interpreted +3. **Permissions Schema Parsing**: Permission objects are rendered according to the Microsoft Sentinel permissions schema, including: + - **resourceProvider**: Azure resource provider permissions with scope, required actions (read/write/delete/action) + - **customs**: Custom prerequisites with names and descriptions + - **licenses**: Required Microsoft 365 licenses with friendly names + - **tenant**: Azure AD tenant permissions with required roles +4. **AI-Powered Rendering**: Specialized handlers for each UI type convert the JSON structure into descriptive markdown: + - Form fields (textboxes, dropdowns) are described with their purposes and validation requirements + - Management grids and data selectors are explained with their configuration options + - Portal-only interfaces are identified and marked with clear indicators + - Permission requirements are formatted with clear scope and action descriptions +5. 
**Markdown Formatting**: The rendered content is formatted with emoji indicators, step numbers, and disclaimers + +### UI Types Supported + +The script includes specialized handlers for connector UI configuration types based on the [official Microsoft Sentinel data connector UI definitions reference](https://learn.microsoft.com/en-us/azure/sentinel/data-connector-ui-definitions-reference#instructionsteps): + +**Standard Instruction Types:** + +- **OAuthForm**: OAuth authentication forms with client credentials +- **Textbox**: Input fields for text, passwords, numbers, and email addresses +- **Dropdown**: Selection lists with single or multi-select options +- **Markdown**: Formatted text content with links and formatting +- **CopyableLabel**: Text fields with copy-to-clipboard functionality +- **InfoMessage**: Inline information messages with contextual help +- **ConnectionToggleButton**: Connect/disconnect toggle controls +- **InstructionStepsGroup**: Collapsible groups of nested instructions +- **InstallAgent**: Links to Azure portal sections for agent installation (18 link types supported) + +**UI-Centric Configuration Types:** + +- **DataConnectorsGrid**: Interactive data connector management interface with enable/disable controls +- **ContextPane**: Sidebar configuration panels with detailed settings +- **GCPGrid** / **GCPContextPane**: Google Cloud Platform specific configuration interfaces +- **AADDataTypes**: Azure Active Directory data type selectors +- **MCasDataTypes**: Microsoft Defender for Cloud Apps data type selectors +- **OfficeDataTypes**: Microsoft 365 data type selectors + +Instructions for 74 connectors using these UI-centric configuration interfaces have been enhanced with AI-rendered setup guidance. 
+ +### Example Output + +Instructions are formatted with: + +- 📋 Portal-only interfaces clearly marked +- 📝 Form fields with descriptions and placeholders +- ⚠️ Disclaimers about AI generation and accuracy +- 🔗 Links to GitHub connector definition files + +--- ## Version History -- **v1.0** - Initial release with basic table detection -- **v2.0** - Added parser resolution, context-aware detection, enhanced JSON parsing, flattened metadata, GitHub URLs +### v3.0 + +- Added `connector_instruction_steps` and `connector_permissions` fields to CSV output +- Added AI-rendered connector setup instructions from UI definitions +- Added individual table detail pages for tables with multiple solutions or connectors +- Improved tables index with limited inline display and clickable "+X more" links + +### v2.0 + +- Added parser resolution and context-aware table detection +- Enhanced JSON parsing tolerance for malformed connector definitions +- Flattened metadata extraction from nested solution structures +- Added GitHub URLs for all file references +- Improved error handling and validation + +### v1.0 + +- Initial release with basic table detection from connector JSON files +- CSV output with solution, connector, and table mappings +- Issues and exceptions reporting diff --git a/Tools/Solutions Analyzer/connector-docs/README.md b/Tools/Solutions Analyzer/connector-docs/README.md index d753404545c..0087a4c8954 100644 --- a/Tools/Solutions Analyzer/connector-docs/README.md +++ b/Tools/Solutions Analyzer/connector-docs/README.md @@ -17,9 +17,23 @@ Individual solution pages are organized in the [`solutions/`](solutions/) direct - Solution metadata (title, publisher, description) - List of data connectors included in the solution - Detailed connector information (ID, title, description) +- Setup instructions (AI-generated from UI definitions - verify in portal) +- Required permissions and prerequisites - Tables associated with each connector - Table uniqueness indicators (whether a 
table is used by only one connector) +### Connector Pages + +Individual connector pages are organized in the [`connectors/`](connectors/) directory. Each connector page includes: + +- Connector metadata (ID, publisher, tables ingested) +- Full connector description +- Required permissions and prerequisites +- **Setup Instructions** - Step-by-step configuration guidance rendered from connector UI definitions using AI + - ⚠️ **Note**: Instructions are automatically rendered from the user interface definition files using AI and may not be fully accurate. Always verify configuration steps in the Microsoft Sentinel portal. +- Solutions that include this connector +- Links to connector definition files on GitHub + ## 📊 Quick Statistics For current statistics, see the [Solutions Index](solutions-index.md) which displays up-to-date counts of solutions (with and without connectors), connectors, and tables. @@ -64,6 +78,17 @@ This documentation is automatically generated from the Solutions Analyzer tool, The analyzer identifies table references in connector configurations and parser logic to create comprehensive mappings. +### AI-Generated Instructions + +**Setup Instructions** in connector documentation are automatically extracted from connector UI definition files using AI: +- Interprets UI-centric instruction types (DataConnectorsGrid, ContextPane, GCPGrid, AADDataTypes, etc.) +- Converts JSON UI definitions to readable markdown format +- Generates step-by-step configuration guidance +- Describes form fields, dropdowns, and management interfaces +- Marks portal-only features with visual indicators + +⚠️ **Important**: AI-generated instructions may not be fully accurate. Always verify all configuration steps in the Microsoft Sentinel portal before implementation. 
+ ## 📝 Data Source The documentation is based on analysis of the `solutions_connectors_tables_mapping.csv` file, which contains: diff --git a/Tools/Solutions Analyzer/connector-docs/connectors-index.md b/Tools/Solutions Analyzer/connector-docs/connectors-index.md index fc914173143..208db5ecd6f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors-index.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors-index.md @@ -12,7 +12,7 @@ Browse all data connectors available in Microsoft Sentinel Solutions. ## Overview -This page lists **503 unique connectors** across all solutions. +This page lists **462 unique connectors** across all solutions. **Jump to:** [#](##) | [A](#a) | [B](#b) | [C](#c) | [D](#d) | [E](#e) | [F](#f) | [G](#g) | [H](#h) | [I](#i) | [J](#j) | [K](#k) | [L](#l) | [M](#m) | [N](#n) | [O](#o) | [P](#p) | [Q](#q) | [R](#r) | [S](#s) | [T](#t) | [V](#v) | [W](#w) | [Z](#z) @@ -94,20 +94,6 @@ The 1Password CCP connector allows the user to ingest 1Password Audit, Signin & --- -### [[Recommended] Infoblox Cloud Data Connector via AMA](connectors/infobloxclouddataconnectorama.md) - -**Publisher:** Infoblox - -**Solution:** [Infoblox](solutions/infoblox.md) - -**Tables (1):** `CommonSecurityLog` - -The Infoblox Cloud Data Connector allows you to easily connect your Infoblox data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. 
- -[→ View full connector details](connectors/infobloxclouddataconnectorama.md) - ---- - ### [[Recommended] Infoblox SOC Insight Data Connector via AMA](connectors/infobloxsocinsightsdataconnector-ama.md) **Publisher:** Infoblox @@ -595,20 +581,6 @@ Azure Activity Log is a subscription log that provides insight into subscription --- -### [Azure Batch Account](connectors/azurebatchaccount-ccp.md) - -**Publisher:** Microsoft - -**Solution:** [Azure Batch Account](solutions/azure-batch-account.md) - -**Tables (1):** `AzureDiagnostics` - -Azure Batch Account is a uniquely identified entity within the Batch service. Most Batch solutions use Azure Storage for storing resource files and output files, so each Batch account is usually associated with a corresponding storage account. This connector lets you stream your Azure Batch account diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2224103&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/azurebatchaccount-ccp.md) - ---- - ### [Azure CloudNGFW By Palo Alto Networks](connectors/azurecloudngfwbypaloaltonetworks.md) **Publisher:** Palo Alto Networks @@ -623,48 +595,6 @@ Cloud Next-Generation Firewall by Palo Alto Networks - an Azure Native ISV Servi --- -### [Azure Cognitive Search](connectors/azurecognitivesearch-ccp.md) - -**Publisher:** Microsoft - -**Solution:** [Azure Cognitive Search](solutions/azure-cognitive-search.md) - -**Tables (1):** `AzureDiagnostics` - -Azure Cognitive Search is a cloud search service that gives developers infrastructure, APIs, and tools for building a rich search experience over private, heterogeneous content in web, mobile, and enterprise applications. 
This connector lets you stream your Azure Cognitive Search diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. - -[→ View full connector details](connectors/azurecognitivesearch-ccp.md) - ---- - -### [Azure DDoS Protection](connectors/ddos.md) - -**Publisher:** Microsoft - -**Solution:** [Azure DDoS Protection](solutions/azure-ddos-protection.md) - -**Tables (1):** `AzureDiagnostics` - -Connect to Azure DDoS Protection Standard logs via Public IP Address Diagnostic Logs. In addition to the core DDoS protection in the platform, Azure DDoS Protection Standard provides advanced DDoS mitigation capabilities against network attacks. It's automatically tuned to protect your specific Azure resources. Protection is simple to enable during the creation of new virtual networks. It can also be done after creation and requires no application or resource changes. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219760&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/ddos.md) - ---- - -### [Azure Data Lake Storage Gen1](connectors/azuredatalakestoragegen1-ccp.md) - -**Publisher:** Microsoft - -**Solution:** [Azure Data Lake Storage Gen1](solutions/azure-data-lake-storage-gen1.md) - -**Tables (1):** `AzureDiagnostics` - -Azure Data Lake Storage Gen1 is an enterprise-wide hyper-scale repository for big data analytic workloads. Azure Data Lake enables you to capture data of any size, type, and ingestion speed in one single place for operational and exploratory analytics. This connector lets you stream your Azure Data Lake Storage Gen1 diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223812&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
- -[→ View full connector details](connectors/azuredatalakestoragegen1-ccp.md) - ---- - ### [Azure DevOps Audit Logs (via Codeless Connector Platform)](connectors/azuredevopsauditlogs.md) **Publisher:** Microsoft @@ -679,146 +609,6 @@ The Azure DevOps Audit Logs data connector allows you to ingest audit events fro --- -### [Azure Event Hub](connectors/azureeventhub-ccp.md) - -**Publisher:** Microsoft - -**Solution:** [Azure Event Hubs](solutions/azure-event-hubs.md) - -**Tables (1):** `AzureDiagnostics` - -Azure Event Hubs is a big data streaming platform and event ingestion service. It can receive and process millions of events per second. This connector lets you stream your Azure Event Hub diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. - -[→ View full connector details](connectors/azureeventhub-ccp.md) - ---- - -### [Azure Firewall](connectors/azurefirewall.md) - -**Publisher:** Microsoft - -**Solution:** [Azure Firewall](solutions/azure-firewall.md) - -**Tables (10):** `AZFWApplicationRule`, `AZFWDnsQuery`, `AZFWFatFlow`, `AZFWFlowTrace`, `AZFWIdpsSignature`, `AZFWInternalFqdnResolutionFailure`, `AZFWNatRule`, `AZFWNetworkRule`, `AZFWThreatIntel`, `AzureDiagnostics` - -Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/azurefirewall.md) - ---- - -### [Azure Key Vault](connectors/azurekeyvault.md) - -**Publisher:** Microsoft - -**Solution:** [Azure Key Vault](solutions/azure-key-vault.md) - -**Tables (1):** `AzureDiagnostics` - -Azure Key Vault is a cloud service for securely storing and accessing secrets. 
A secret is anything that you want to tightly control access to, such as API keys, passwords, certificates, or cryptographic keys. This connector lets you stream your Azure Key Vault diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220125&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/azurekeyvault.md) - ---- - -### [Azure Kubernetes Service (AKS)](connectors/azurekubernetes.md) - -**Publisher:** Microsoft - -**Solution:** [Azure kubernetes Service](solutions/azure-kubernetes-service.md) - -**Tables (3):** `AzureDiagnostics`, `ContainerInventory`, `KubeEvents` - -Azure Kubernetes Service (AKS) is an open-source, fully-managed container orchestration service that allows you to deploy, scale, and manage Docker containers and container-based applications in a cluster environment. This connector lets you stream your Azure Kubernetes Service (AKS) diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219762&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/azurekubernetes.md) - ---- - -### [Azure Logic Apps](connectors/azurelogicapps-ccp.md) - -**Publisher:** Microsoft - -**Solution:** [Azure Logic Apps](solutions/azure-logic-apps.md) - -**Tables (1):** `AzureDiagnostics` - -Azure Logic Apps is a cloud-based platform for creating and running automated workflows that integrate your apps, data, services, and systems. This connector lets you stream your Azure Logic Apps diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. 
- -[→ View full connector details](connectors/azurelogicapps-ccp.md) - ---- - -### [Azure SQL Databases](connectors/azuresql.md) - -**Publisher:** Microsoft - -**Solution:** [Azure SQL Database solution for sentinel](solutions/azure-sql-database-solution-for-sentinel.md) - -**Tables (1):** `AzureDiagnostics` - -Azure SQL is a fully managed, Platform-as-a-Service (PaaS) database engine that handles most database management functions, such as upgrading, patching, backups, and monitoring, without necessitating user involvement. This connector lets you stream your Azure SQL databases audit and diagnostic logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. - -[→ View full connector details](connectors/azuresql.md) - ---- - -### [Azure Service Bus](connectors/azureservicebus-ccp.md) - -**Publisher:** Microsoft - -**Solution:** [Azure Service Bus](solutions/azure-service-bus.md) - -**Tables (1):** `AzureDiagnostics` - -Azure Service Bus is a fully managed enterprise message broker with message queues and publish-subscribe topics (in a namespace). This connector lets you stream your Azure Service Bus diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. - -[→ View full connector details](connectors/azureservicebus-ccp.md) - ---- - -### [Azure Storage Account](connectors/azurestorageaccount.md) - -**Publisher:** Microsoft - -**Solution:** [Azure Storage](solutions/azure-storage.md) - -**Tables (5):** `AzureMetrics`, `StorageBlobLogs`, `StorageFileLogs`, `StorageQueueLogs`, `StorageTableLogs` - -Azure Storage account is a cloud solution for modern data storage scenarios. It contains all your data objects: blobs, files, queues, tables, and disks. This connector lets you stream Azure Storage accounts diagnostics logs into your Microsoft Sentinel workspace, allowing you to continuously monitor activity in all your instances, and detect malicious activity in your organization. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220068&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/azurestorageaccount.md) - ---- - -### [Azure Stream Analytics](connectors/azurestreamanalytics-ccp.md) - -**Publisher:** Microsoft - -**Solution:** [Azure Stream Analytics](solutions/azure-stream-analytics.md) - -**Tables (1):** `AzureDiagnostics` - -Azure Stream Analytics is a real-time analytics and complex event-processing engine that is designed to analyze and process high volumes of fast streaming data from multiple sources simultaneously. This connector lets you stream your Azure Stream Analytics hub diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. - -[→ View full connector details](connectors/azurestreamanalytics-ccp.md) - ---- - -### [Azure Web Application Firewall (WAF)](connectors/waf.md) - -**Publisher:** Microsoft - -**Solution:** [Azure Web Application Firewall (WAF)](solutions/azure-web-application-firewall-(waf).md) - -**Tables (1):** `AzureDiagnostics` - -Connect to the Azure Web Application Firewall (WAF) for Application Gateway, Front Door, or CDN. This WAF protects your applications from common web vulnerabilities such as SQL injection and cross-site scripting, and lets you customize rules to reduce false positives. Follow these instructions to stream your Microsoft Web application firewall logs into Microsoft Sentinel. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223546&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/waf.md) - ---- - ## B ### [BETTER Mobile Threat Defense (MTD)](connectors/bettermtd.md) @@ -1102,34 +892,6 @@ This is data connector for Check Point Cyberint IOC. 
--- -### [Cisco ASA via Legacy Agent](connectors/ciscoasa.md) - -**Publisher:** Cisco - -**Solution:** [CiscoASA](solutions/ciscoasa.md) - -**Tables (1):** `CommonSecurityLog` - -The Cisco ASA firewall connector allows you to easily connect your Cisco ASA logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. - -[→ View full connector details](connectors/ciscoasa.md) - ---- - -### [Cisco ASA/FTD via AMA](connectors/ciscoasaama.md) - -**Publisher:** Microsoft - -**Solution:** [CiscoASA](solutions/ciscoasa.md) - -**Tables (1):** `CommonSecurityLog` - -The Cisco ASA firewall connector allows you to easily connect your Cisco ASA logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. - -[→ View full connector details](connectors/ciscoasaama.md) - ---- - ### [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md) **Publisher:** Cisco @@ -1373,34 +1135,6 @@ The Cohesity function apps provide the ability to ingest Cohesity Datahawk ranso --- -### [Common Event Format (CEF)](connectors/cef.md) - -**Publisher:** Any - -**Solution:** [Common Event Format](solutions/common-event-format.md) - -**Tables (1):** `CommonSecurityLog` - -Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by many security vendors to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223902&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
- -[→ View full connector details](connectors/cef.md) - ---- - -### [Common Event Format (CEF) via AMA](connectors/cefama.md) - -**Publisher:** Microsoft - -**Solution:** [Common Event Format](solutions/common-event-format.md) - -**Tables (1):** `CommonSecurityLog` - -Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by many security vendors to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223547&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/cefama.md) - ---- - ### [CommvaultSecurityIQ](connectors/commvaultsecurityiq-cl.md) **Publisher:** Commvault @@ -1571,7 +1305,7 @@ Follow the steps to gain access to Cyborg Security's Community and setup the 'Op --- -### [Cyera DSPM Azure Functions Sentinel Data Connector](connectors/cyerafunctionsconnector.md) +### [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](connectors/cyerafunctionsconnector.md) **Publisher:** Cyera Inc @@ -1579,13 +1313,13 @@ Follow the steps to gain access to Cyborg Security's Community and setup the 'Op **Tables (5):** `CyeraAssets_CL`, `CyeraAssets_MS_CL`, `CyeraClassifications_CL`, `CyeraIdentities_CL`, `CyeraIssues_CL` -The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** 
— no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Sentinel. +The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel. 
[→ View full connector details](connectors/cyerafunctionsconnector.md) --- -### [Cyera DSPM Azure Sentinel Data Connector](connectors/cyeradspmccf.md) +### [Cyera DSPM Microsoft Sentinel Data Connector](connectors/cyeradspmccf.md) **Publisher:** Cyera Inc @@ -1593,7 +1327,7 @@ The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera **Tables (5):** `CyeraAssets_CL`, `CyeraAssets_MS_CL`, `CyeraClassifications_CL`, `CyeraIdentities_CL`, `CyeraIssues_CL` -The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once recieced can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance. +The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera DSPM tenant and ingest Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/), which, once received, can be correlated with security events, creating custom columns so that queries don't need to parse it again, thus resulting in better performance. 
[→ View full connector details](connectors/cyeradspmccf.md) @@ -1615,29 +1349,6 @@ The [Cynerio](https://www.cynerio.com/) connector allows you to easily connect y ## D -### [DNS](connectors/dns.md) - -**Publisher:** Microsoft - -**Solution:** [Windows Server DNS](solutions/windows-server-dns.md) - -**Tables (2):** `DnsEvents`, `DnsInventory` - -The DNS log connector allows you to easily connect your DNS analytic and audit logs with Microsoft Sentinel, and other related data, to improve investigation. - -**When you enable DNS log collection you can:** -- Identify clients that try to resolve malicious domain names. -- Identify stale resource records. -- Identify frequently queried domain names and talkative DNS clients. -- View request load on DNS servers. -- View dynamic DNS registration failures. - -For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220127&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/dns.md) - ---- - ### [Darktrace Connector for Microsoft Sentinel REST API](connectors/darktracerestconnector.md) **Publisher:** Darktrace @@ -3064,44 +2775,6 @@ The MailRisk by Secure Practice connector allows you to ingest email threat inte --- -### [Microsoft 365 (formerly, Office 365)](connectors/office365.md) - -**Publisher:** Microsoft - -**Solution:** [Microsoft 365](solutions/microsoft-365.md) - -**Tables (4):** `OfficeActivity`, `exchange`, `sharePoint`, `teams` - -The Microsoft 365 (formerly, Office 365) activity log connector provides insight into ongoing user activities. You will get details of operations such as file downloads, access requests sent, changes to group events, set-mailbox and details of the user who performed the actions. By connecting Microsoft 365 logs into Microsoft Sentinel you can use this data to view dashboards, create custom alerts, and improve your investigation process. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219943&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/office365.md) - ---- - -### [Microsoft 365 Insider Risk Management](connectors/officeirm.md) - -**Publisher:** Microsoft - -**Solution:** [MicrosoftPurviewInsiderRiskManagement](solutions/microsoftpurviewinsiderriskmanagement.md) - -**Tables (1):** `SecurityAlert` - -Microsoft 365 Insider Risk Management is a compliance solution in Microsoft 365 that helps minimize internal risks by enabling you to detect, investigate, and act on malicious and inadvertent activities in your organization. Risk analysts in your organization can quickly take appropriate actions to make sure users are compliant with your organization's compliance standards. - -Insider risk policies allow you to: - -- define the types of risks you want to identify and detect in your organization. -- decide on what actions to take in response, including escalating cases to Microsoft Advanced eDiscovery if needed. - -This solution produces alerts that can be seen by Office customers in the Insider Risk Management solution in Microsoft 365 Compliance Center. -[Learn More](https://aka.ms/OfficeIRMConnector) about Insider Risk Management. - -These alerts can be imported into Microsoft Sentinel with this connector, allowing you to see, investigate, and respond to them in a broader organizational threat context. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223721&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
- -[→ View full connector details](connectors/officeirm.md) - ---- - ### [Microsoft Copilot](connectors/microsoftcopilot.md) **Publisher:** Microsoft @@ -3130,90 +2803,6 @@ Microsoft Sentinel provides you the capability to import threat intelligence gen --- -### [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) - -**Publisher:** Microsoft - -**Solution:** [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) - -**Tables (22):** `AlertEvidence`, `CloudAppEvents`, `DeviceEvents`, `DeviceFileCertificateInfo`, `DeviceFileEvents`, `DeviceImageLoadEvents`, `DeviceInfo`, `DeviceLogonEvents`, `DeviceNetworkEvents`, `DeviceNetworkInfo`, `DeviceProcessEvents`, `DeviceRegistryEvents`, `EmailAttachmentInfo`, `EmailEvents`, `EmailPostDeliveryEvents`, `EmailUrlInfo`, `IdentityDirectoryEvents`, `IdentityLogonEvents`, `IdentityQueryEvents`, `SecurityAlert`, `SecurityIncident`, `UrlClickEvents` - -Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats. - -Microsoft Defender XDR suite includes: -- Microsoft Defender for Endpoint -- Microsoft Defender for Identity -- Microsoft Defender for Office 365 -- Threat & Vulnerability Management -- Microsoft Defender for Cloud Apps - -For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
- -[→ View full connector details](connectors/microsoftthreatprotection.md) - ---- - -### [Microsoft Defender for Cloud Apps](connectors/microsoftcloudappsecurity.md) - -**Publisher:** Microsoft - -**Solution:** [Microsoft Defender for Cloud Apps](solutions/microsoft-defender-for-cloud-apps.md) - -**Tables (3):** `McasShadowItReporting`, `SecurityAlert`, `discoveryLogs` - -By connecting with [Microsoft Defender for Cloud Apps](https://aka.ms/asi-mcas-connector-description) you will gain visibility into your cloud apps, get sophisticated analytics to identify and combat cyberthreats, and control how your data travels. - -- Identify shadow IT cloud apps on your network. -- Control and limit access based on conditions and session context. -- Use built-in or custom policies for data sharing and data loss prevention. -- Identify high-risk use and get alerts for unusual user activities with Microsoft behavioral analytics and anomaly detection capabilities, including ransomware activity, impossible travel, suspicious email forwarding rules, and mass download of files. -- Mass download of files - -[Deploy now >](https://aka.ms/asi-mcas-connector-deploynow) - -[→ View full connector details](connectors/microsoftcloudappsecurity.md) - ---- - -### [Microsoft Defender for Endpoint](connectors/microsoftdefenderadvancedthreatprotection.md) - -**Publisher:** Microsoft - -**Solution:** [MicrosoftDefenderForEndpoint](solutions/microsoftdefenderforendpoint.md) - -**Tables (1):** `SecurityAlert` - -Microsoft Defender for Endpoint is a security platform designed to prevent, detect, investigate, and respond to advanced threats. The platform creates alerts when suspicious security events are seen in an organization. Fetch alerts generated in Microsoft Defender for Endpoint to Microsoft Sentinel so that you can effectively analyze security events. You can create rules, build dashboards and author playbooks for immediate response. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2220128&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/microsoftdefenderadvancedthreatprotection.md) - ---- - -### [Microsoft Defender for Identity](connectors/azureadvancedthreatprotection.md) - -**Publisher:** Microsoft - -**Solution:** [Microsoft Defender For Identity](solutions/microsoft-defender-for-identity.md) - -**Tables (1):** `SecurityAlert` - -Connect Microsoft Defender for Identity to gain visibility into the events and user analytics. Microsoft Defender for Identity identifies, detects, and helps you investigate advanced threats, compromised identities, and malicious insider actions directed at your organization. Microsoft Defender for Identity enables SecOp analysts and security professionals struggling to detect advanced attacks in hybrid environments to: - -- Monitor users, entity behavior, and activities with learning-based analytics​ -- Protect user identities and credentials stored in Active Directory -- Identify and investigate suspicious user activities and advanced attacks throughout the kill chain -- Provide clear incident information on a simple timeline for fast triage - -[Try now >](https://aka.ms/AtpTryNow) - -[Deploy now >](https://aka.ms/AzureATP_Deploy) - -For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2220069&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
- -[→ View full connector details](connectors/azureadvancedthreatprotection.md) - ---- - ### [Microsoft Defender for Office 365 (Preview)](connectors/officeatp.md) **Publisher:** Microsoft @@ -3241,36 +2830,6 @@ For more information, see the [Microsoft Sentinel documentation](https://go.micr --- -### [Microsoft Entra ID](connectors/azureactivedirectory.md) - -**Publisher:** Microsoft - -**Solution:** [Microsoft Entra ID](solutions/microsoft-entra-id.md) - -**Tables (22):** `AADManagedIdentitySignInLogs`, `AADNonInteractiveUserSignInLogs`, `AADProvisioningLogs`, `AADRiskyServicePrincipals`, `AADRiskyUsers`, `AADServicePrincipalRiskEvents`, `AADServicePrincipalSignInLogs`, `AADUserRiskEvents`, `ADFSSignInLogs`, `AuditLogs`, `ManagedIdentitySignInLogs`, `NetworkAccessTraffic`, `NetworkAccessTrafficLogs`, `NonInteractiveUserSignInLogs`, `ProvisioningLogs`, `RiskyServicePrincipals`, `RiskyUsers`, `ServicePrincipalRiskEvents`, `ServicePrincipalSignInLogs`, `SignInLogs`, `SigninLogs`, `UserRiskEvents` - -Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth related details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
- -[→ View full connector details](connectors/azureactivedirectory.md) - ---- - -### [Microsoft Entra ID Protection](connectors/azureactivedirectoryidentityprotection.md) - -**Publisher:** Microsoft - -**Solution:** [Microsoft Entra ID Protection](solutions/microsoft-entra-id-protection.md) - -**Tables (1):** `SecurityAlert` - -Microsoft Entra ID Protection provides a consolidated view at risk users, risk events and vulnerabilities, with the ability to remediate risk immediately, and set policies to auto-remediate future events. The service is built on Microsoft’s experience protecting consumer identities and gains tremendous accuracy from the signal from over 13 billion logins a day. Integrate Microsoft Entra ID Protection alerts with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. For more information, see the [Microsoft Sentinel documentation ](https://go.microsoft.com/fwlink/p/?linkid=2220065&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[Get Microsoft Entra ID Premium P1/P2 ](https://aka.ms/asi-ipcconnectorgetlink) - -[→ View full connector details](connectors/azureactivedirectoryidentityprotection.md) - ---- - ### [Microsoft Exchange Admin Audit Logs by Event Logs](connectors/esi-opt1exchangeadminauditlogsbyeventlogs.md) **Publisher:** Microsoft @@ -3341,20 +2900,6 @@ Microsoft PowerBI is a collection of software services, apps, and connectors tha --- -### [Microsoft Project](connectors/office365project.md) - -**Publisher:** Microsoft - -**Solution:** [Microsoft Project](solutions/microsoft-project.md) - -**Tables (1):** `ProjectActivity` - -Microsoft Project (MSP) is a project management software solution. Depending on your plan, Microsoft Project lets you plan projects, assign tasks, manage resources, create reports and more. This connector allows you to stream your Azure Project audit logs into Microsoft Sentinel in order to track your project activities. 
- -[→ View full connector details](connectors/office365project.md) - ---- - ### [Microsoft Purview](connectors/microsoftazurepurview.md) **Publisher:** Microsoft @@ -3727,28 +3272,6 @@ The [Netskope Web Transactions](https://docs.netskope.com/en/netskope-help/data- --- -### [Network Security Groups](connectors/azurensg.md) - -**Publisher:** Microsoft - -**Solution:** [Azure Network Security Groups](solutions/azure-network-security-groups.md) - -**Tables (1):** `AzureDiagnostics` - -Azure network security groups (NSG) allow you to filter network traffic to and from Azure resources in an Azure virtual network. A network security group includes rules that allow or deny traffic to a virtual network subnet, network interface, or both. - -When you enable logging for an NSG, you can gather the following types of resource log information: - -- **Event:** Entries are logged for which NSG rules are applied to VMs, based on MAC address. -- **Rule counter:** Contains entries for how many times each NSG rule is applied to deny or allow traffic. The status for these rules is collected every 300 seconds. - - -This connector lets you stream your NSG diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223718&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/azurensg.md) - ---- - ### [Noname Security for Microsoft Sentinel](connectors/nonamesecuritymicrosoftsentinel.md) **Publisher:** Noname Security @@ -3849,20 +3372,6 @@ Onapsis Defend Integration is aimed at forwarding alerts and logs collected and --- -### [One Identity Safeguard](connectors/oneidentity.md) - -**Publisher:** One Identity LLC. 
- -**Solution:** [OneIdentity](solutions/oneidentity.md) - -**Tables (1):** `CommonSecurityLog` - -The One Identity Safeguard (CEF) Sentinel data connector enhances the standard Common Event Format (CEF) connector with Safeguard for Privileged Sessions-specific dashboards. Use this connector to easily start utilizing the events generated by your device for visualization, alerts, investigations and more. - -[→ View full connector details](connectors/oneidentity.md) - ---- - ### [OneLogin IAM Platform (via Codeless Connector Framework)](connectors/oneloginiamlogsccpdefinition.md) **Publisher:** Microsoft @@ -4316,6 +3825,20 @@ The SINEC Security Guard solution for Microsoft Sentinel allows you to ingest se --- +### [SOC Prime Platform Audit Logs Data Connector](connectors/socprimeauditlogsdataconnector.md) + +**Publisher:** Microsoft + +**Solution:** [SOC Prime CCF](solutions/soc-prime-ccf.md) + +**Tables (1):** `SOCPrimeAuditLogs_CL` + +The [SOC Prime Audit Logs](https://help.socprime.com/en/articles/6265791-api) data connector allows ingesting logs from the SOC Prime Platform API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SOC Prime Platform API to fetch SOC Prime platform audit logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table, thus resulting in better performance. 
+ +[→ View full connector details](connectors/socprimeauditlogsdataconnector.md) + +--- + ### [SaaS Security](connectors/valencesecurity.md) **Publisher:** Valence Security @@ -4372,20 +3895,6 @@ Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile s --- -### [Security Events via Legacy Agent](connectors/securityevents.md) - -**Publisher:** Microsoft - -**Solution:** [Windows Security Events](solutions/windows-security-events.md) - -**Tables (1):** `SecurityEvent` - -You can stream all security events from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220093&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/securityevents.md) - ---- - ### [SecurityBridge Solution for SAP](connectors/securitybridge.md) **Publisher:** SecurityBridge Group GmbH @@ -4685,36 +4194,6 @@ The [StyxView Alerts](https://styxintel.com/) data connector enables seamless in --- -### [Subscription-based Microsoft Defender for Cloud (Legacy)](connectors/azuresecuritycenter.md) - -**Publisher:** Microsoft - -**Solution:** [Microsoft Defender for Cloud](solutions/microsoft-defender-for-cloud.md) - -**Tables (1):** `SecurityAlert` - -Microsoft Defender for Cloud is a security management tool that allows you to detect and quickly respond to threats across Azure, hybrid, and multi-cloud workloads. This connector allows you to stream your security alerts from Microsoft Defender for Cloud into Microsoft Sentinel, so you can view Defender data in workbooks, query it to produce alerts, and investigate and respond to incidents. 
- -[For more information>](https://aka.ms/ASC-Connector) - -[→ View full connector details](connectors/azuresecuritycenter.md) - ---- - -### [Symantec Integrated Cyber Defense Exchange](connectors/symantec.md) - -**Publisher:** Symantec - -**Solution:** [Symantec Integrated Cyber Defense](solutions/symantec-integrated-cyber-defense.md) - -**Tables (1):** `SymantecICDx_CL` - -Symantec ICDx connector allows you to easily connect your Symantec security solutions logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities. - -[→ View full connector details](connectors/symantec.md) - ---- - ### [Syslog via AMA](connectors/syslogama.md) **Publisher:** Microsoft @@ -4932,20 +4411,6 @@ Microsoft Sentinel integrates with TAXII 2.1 servers to enable exporting of your --- -### [Transmit Security Connector](connectors/transmitsecurity.md) - -**Publisher:** TransmitSecurity - -**Solution:** [TransmitSecurity](solutions/transmitsecurity.md) - -**Tables (1):** `TransmitSecurityActivity_CL` - -The [Transmit Security] data connector provides the capability to ingest common Transmit Security API events into Microsoft Sentinel through the REST API. [Refer to API documentation for more information](https://developer.transmitsecurity.com/). The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. 
- -[→ View full connector details](connectors/transmitsecurity.md) - ---- - ### [Trend Micro Cloud App Security](connectors/trendmicrocas.md) **Publisher:** Trend Micro @@ -5146,97 +4611,6 @@ VirtualMetric Director Proxy deploys an Azure Function App to securely bridge Vi ## W -### [Windows DNS Events via AMA](connectors/asimdnsactivitylogs.md) - -**Publisher:** Microsoft - -**Solution:** [Windows Server DNS](solutions/windows-server-dns.md) - -**Tables (1):** `ASimDnsActivityLogs` - -The Windows DNS log connector allows you to easily filter and stream all analytics logs from your Windows DNS servers to your Microsoft Sentinel workspace using the Azure Monitoring agent (AMA). Having this data in Microsoft Sentinel helps you identify issues and security threats such as: -- Trying to resolve malicious domain names. -- Stale resource records. -- Frequently queried domain names and talkative DNS clients. -- Attacks performed on DNS server. - -You can get the following insights into your Windows DNS servers from Microsoft Sentinel: -- All logs centralized in a single place. -- Request load on DNS servers. -- Dynamic DNS registration failures. - -Windows DNS events are supported by Advanced SIEM Information Model (ASIM) and stream data into the ASimDnsActivityLogs table. [Learn more](https://docs.microsoft.com/azure/sentinel/normalization). - -For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2225993&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/asimdnsactivitylogs.md) - ---- - -### [Windows Firewall](connectors/windowsfirewall.md) - -**Publisher:** Microsoft - -**Solution:** [Windows Firewall](solutions/windows-firewall.md) - -**Tables (1):** `WindowsFirewall` - -Windows Firewall is a Microsoft Windows application that filters information coming to your system from the Internet and blocking potentially harmful programs. 
The software blocks most programs from communicating through the firewall. Users simply add a program to the list of allowed programs to allow it to communicate through the firewall. When using a public network, Windows Firewall can also secure the system by blocking all unsolicited attempts to connect to your computer. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219791&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/windowsfirewall.md) - ---- - -### [Windows Firewall Events via AMA](connectors/windowsfirewallama.md) - -**Publisher:** Microsoft - -**Solution:** [Windows Firewall](solutions/windows-firewall.md) - -**Tables (1):** `ASimNetworkSessionLogs` - -Windows Firewall is a Microsoft Windows application that filters information coming to your system from the internet and blocking potentially harmful programs. The firewall software blocks most programs from communicating through the firewall. To stream your Windows Firewall application logs collected from your machines, use the Azure Monitor agent (AMA) to stream those logs to the Microsoft Sentinel workspace. - -A configured data collection endpoint (DCE) is required to be linked with the data collection rule (DCR) created for the AMA to collect logs. For this connector, a DCE is automatically created in the same region as the workspace. If you already use a DCE stored in the same region, it's possible to change the default created DCE and use your existing one through the API. DCEs can be located in your resources with **SentinelDCE** prefix in the resource name. 
- -For more information, see the following articles: -- [Data collection endpoints in Azure Monitor](https://learn.microsoft.com/azure/azure-monitor/essentials/data-collection-endpoint-overview?tabs=portal) -- [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2228623&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci) - -[→ View full connector details](connectors/windowsfirewallama.md) - ---- - -### [Windows Forwarded Events](connectors/windowsforwardedevents.md) - -**Publisher:** Microsoft - -**Solution:** [Windows Forwarded Events](solutions/windows-forwarded-events.md) - -**Tables (1):** `WindowsEvent` - -You can stream all Windows Event Forwarding (WEF) logs from the Windows Servers connected to your Microsoft Sentinel workspace using Azure Monitor Agent (AMA). - This connection enables you to view dashboards, create custom alerts, and improve investigation. - This gives you more insight into your organization’s network and improves your security operation capabilities. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219963&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/windowsforwardedevents.md) - ---- - -### [Windows Security Events via AMA](connectors/windowssecurityevents.md) - -**Publisher:** Microsoft - -**Solution:** [Windows Security Events](solutions/windows-security-events.md) - -**Tables (1):** `SecurityEvent` - -You can stream all security events from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220225&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -[→ View full connector details](connectors/windowssecurityevents.md) - ---- - ### [WithSecure Elements API (Azure Function)](connectors/withsecureelementsviafunction.md) **Publisher:** WithSecure @@ -5377,7 +4751,7 @@ The [Zoom](https://zoom.us/) Reports data connector provides the capability to i ## Deprecated Connectors -The following **142 connector(s)** are deprecated: +The following **140 connector(s)** are deprecated: ### [[DEPRECATED] Cisco Secure Endpoint (AMP)](connectors/ciscosecureendpoint.md) @@ -5787,20 +5161,6 @@ The Barracuda Web Application Firewall (WAF) connector allows you to easily conn --- -### [[Deprecated] Blackberry CylancePROTECT](connectors/blackberrycylanceprotect.md) - -**Publisher:** Blackberry - -**Solution:** [Blackberry CylancePROTECT](solutions/blackberry-cylanceprotect.md) - -**Tables (1):** `Syslog` - -The [Blackberry CylancePROTECT](https://www.blackberry.com/us/en/products/blackberry-protect) connector allows you to easily connect your CylancePROTECT logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities. 
- -[→ View full connector details](connectors/blackberrycylanceprotect.md) - ---- - ### [[Deprecated] Broadcom Symantec DLP via AMA](connectors/broadcomsymantecdlpama.md) **Publisher:** Broadcom @@ -6523,6 +5883,20 @@ The Illusive Platform Connector allows you to share Illusive's attack surface an --- +### [[Deprecated] Infoblox Cloud Data Connector via AMA](connectors/infobloxclouddataconnectorama.md) + +**Publisher:** Infoblox + +**Solution:** [Infoblox Cloud Data Connector](solutions/infoblox-cloud-data-connector.md) + +**Tables (1):** `CommonSecurityLog` + +The Infoblox Cloud Data Connector allows you to easily connect your Infoblox BloxOne data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. + +[→ View full connector details](connectors/infobloxclouddataconnectorama.md) + +--- + ### [[Deprecated] Infoblox Cloud Data Connector via Legacy Agent](connectors/infobloxclouddataconnector.md) **Publisher:** Infoblox @@ -7345,34 +6719,6 @@ The [Zscaler Private Access (ZPA)](https://help.zscaler.com/zpa/what-zscaler-pri --- -### [[Deprecated] Zscaler via AMA](connectors/zscalerama.md) - -**Publisher:** Zscaler - -**Solution:** [Zscaler Internet Access](solutions/zscaler-internet-access.md) - -**Tables (1):** `CommonSecurityLog` - -The Zscaler data connector allows you to easily connect your Zscaler Internet Access (ZIA) logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
Using Zscaler on Microsoft Sentinel will provide you more insights into your organization’s Internet usage, and will enhance its security operation capabilities.​ - -[→ View full connector details](connectors/zscalerama.md) - ---- - -### [[Deprecated] Zscaler via Legacy Agent](connectors/zscaler.md) - -**Publisher:** Zscaler - -**Solution:** [Zscaler Internet Access](solutions/zscaler-internet-access.md) - -**Tables (1):** `CommonSecurityLog` - -The Zscaler data connector allows you to easily connect your Zscaler Internet Access (ZIA) logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. Using Zscaler on Microsoft Sentinel will provide you more insights into your organization’s Internet usage, and will enhance its security operation capabilities.​ - -[→ View full connector details](connectors/zscaler.md) - ---- - ### [[Deprecated] iboss via Legacy Agent](connectors/iboss.md) **Publisher:** iboss diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/1password.md b/Tools/Solutions Analyzer/connector-docs/connectors/1password.md index 9fe3e4585e5..45aa29de081 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/1password.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/1password.md @@ -22,4 +22,43 @@ This solution takes a dependency on the following technologies, and some of thes - [Azure Functions](https://azure.microsoft.com/services/functions/#overview) +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **1Password API Token**: A 1Password API Token is required. 
[See the documentation to learn more about the 1Password API](https://developer.1password.com/docs/events-api/reference). **Note:** A 1Password account is required + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to 1Password to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the 1Password API** + + [Follow these instructions](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) provided by 1Password to obtain an API Token. **Note:** A 1Password account is required + +**STEP 2 - Deploy the functionApp using DeployToAzure button to create the table, dcr and the associated Azure Function** + +>**IMPORTANT:** Before deploying the 1Password connector, a custom table needs to be created. + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +This method provides an automated deployment of the 1Password connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OnePassword-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. 
Enter the **Workspace Name**, **API Key**, and **URI**. + - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. + - Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/1passwordccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/1passwordccpdefinition.md index 3c894680386..f1751e3ea45 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/1passwordccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/1passwordccpdefinition.md @@ -10,4 +10,31 @@ The 1Password CCP connector allows the user to ingest 1Password Audit, Signin & ItemUsage events into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **1Password API token**: A 1Password API Token is required. See the [1Password documentation](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) on how to create an API token. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
STEP 1 - Create a 1Password API token:** + +Follow the [1Password documentation](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) for guidance on this step. + +**2. STEP 2 - Choose the correct base URL:** + +There are multiple 1Password servers which might host your events. The correct server depends on your license and region. Follow the [1Password documentation](https://developer.1password.com/docs/events-api/reference/#servers) to choose the correct server. Input the base URL as displayed by the documentation (including 'https://' and without a trailing '/'). + +**3. STEP 3 - Enter your 1Password Details:** + +Enter the 1Password base URL & API Token below: +- **Base Url**: Enter your Base Url +- **API Token**: (password field) +- Click 'connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/42crunchapiprotection.md b/Tools/Solutions Analyzer/connector-docs/connectors/42crunchapiprotection.md index 78b89392758..9169a7fc99c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/42crunchapiprotection.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/42crunchapiprotection.md @@ -10,4 +10,52 @@ Connects the 42Crunch API protection to Azure Log Analytics via the REST API interface +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Step 1: Read the detailed documentation** + +The installation process is documented in great detail in the GitHub repository [Microsoft Sentinel integration](https://github.com/42Crunch/azure-sentinel-integration). The user should consult this repository further to understand installation and debug of the integration. + +**2. Step 2: Retrieve the workspace access credentials** + +The first installation step is to retrieve both your **Workspace ID** and **Primary Key** from the Microsoft Sentinel platform. +Copy the values shown below and save them for configuration of the API log forwarder integration. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Step 3: Install the 42Crunch protection and log forwarder** + +The next step is to install the 42Crunch protection and log forwarder to protect your API. Both components are available as containers from the [42Crunch repository](https://hub.docker.com/u/42crunch). The exact installation will depend on your environment, consult the [42Crunch protection documentation](https://docs.42crunch.com/latest/content/concepts/api_firewall_deployment_architecture.htm) for full details. Two common installation scenarios are described below: +**Installation via Docker Compose** + + The solution can be installed using a [Docker compose file](https://github.com/42Crunch/azure-sentinel-integration/blob/main/sample-deployment/docker-compose.yml). + + **Installation via Helm charts** + + The solution can be installed using a [Helm chart](https://github.com/42Crunch/azure-sentinel-integration/tree/main/helm/sentinel). +**4. 
Step 4: Test the data ingestion** + +In order to test the data ingestion the user should deploy the sample *httpbin* application alongside the 42Crunch protection and log forwarder [described in detail here](https://github.com/42Crunch/azure-sentinel-integration/tree/main/sample-deployment). +**4.1 Install the sample** + + The sample application can be installed locally using a [Docker compose file](https://github.com/42Crunch/azure-sentinel-integration/blob/main/sample-deployment/docker-compose.yml) which will install the httpbin API server, the 42Crunch API protection and the Microsoft Sentinel log forwarder. Set the environment variables as required using the values copied from step 2. + + **4.2 Run the sample** + + Verify the API protection is connected to the 42Crunch platform, and then exercise the API locally on the *localhost* at port 8080 using Postman, curl, or similar. You should see a mixture of passing and failing API calls. + + **4.3 Verify the data ingestion on Log Analytics** + + After approximately 20 minutes access the Log Analytics workspace on your Microsoft Sentinel installation, and locate the *Custom Logs* section and verify that an *apifirewall_log_1_CL* table exists. Use the sample queries to examine the data. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/abnormalsecurity.md b/Tools/Solutions Analyzer/connector-docs/connectors/abnormalsecurity.md index 73a60027c43..e289a81c27c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/abnormalsecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/abnormalsecurity.md @@ -10,4 +10,91 @@ The Abnormal Security data connector provides the capability to ingest threat and case logs into Microsoft Sentinel using the [Abnormal Security Rest API.](https://app.swaggerhub.com/apis/abnormal-security/abx/) +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Abnormal Security API Token**: An Abnormal Security API Token is required. [See the documentation to learn more about Abnormal Security API](https://app.swaggerhub.com/apis/abnormal-security/abx/). **Note:** An Abnormal Security account is required + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to Abnormal Security's REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +**STEP 1 - Configuration steps for the Abnormal Security API** + + [Follow these instructions](https://app.swaggerhub.com/apis/abnormal-security/abx) provided by Abnormal Security to configure the REST API integration. **Note:** An Abnormal Security account is required + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Abnormal Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Abnormal Security API Authorization Token, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +This method provides an automated deployment of the Abnormal Security connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-abnormalsecurity-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Microsoft Sentinel Workspace ID**, **Microsoft Sentinel Shared Key** and **Abnormal Security REST API Key**. + - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. + 4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. 
Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Abnormal Security data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-abnormalsecurity-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. AbnormalSecurityXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. 
Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + SENTINEL_WORKSPACE_ID + SENTINEL_SHARED_KEY + ABNORMAL_SECURITY_REST_API_TOKEN + logAnalyticsUri (optional) +(add any other settings required by the Function App) +Set the `uri` value to: `` +>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us.` +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/agari.md b/Tools/Solutions Analyzer/connector-docs/connectors/agari.md index a8e327b370d..4142f95f44a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/agari.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/agari.md @@ -10,4 +10,110 @@ This connector uses a Agari REST API connection to push data into Azure Sentinel Log Analytics. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App are required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Agari Phishing Defense, Phishing Response or Brand Protection API Client ID and Secret**: Ensure you have your Client ID and Secret keys. Instructions can be found on the [Agari Developers Site](https://developers.agari.com/agari-platform/docs/quick-start). +- **(Optional) Microsoft Security Graph API**: The Agari Function App has the ability to share threat intelligence with Sentinel via the Security Graph API. To use this feature, you will need to enable the [Sentinel Threat Intelligence Platforms connector](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence) as well as register an application in Azure Active Directory. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Agari APIs to pull its logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**1.
STEP 1 - Get your Agari API credentials** + +1. Log into any Agari product (Client ID and Secret are the same for all applications) +2. Click on your username in the upper right and select **Settings** +3. Click on the **Generate API Secret** link to generate an API client_id and client_secret (the link will read **Regenerate API Secret** if you have already generated an API client ID/secret previously) +4. Copy both the client_id and client_secret that are generated + +**2. STEP 2 - (Optional) Enable the Security Graph API** + +Follow the instructions found in the article [Connect Azure Sentinel to your threat intelligence platform](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence#connect-azure-sentinel-to-your-threat-intelligence-platform). Once the application is created you will need to record the Tenant ID, Client ID and Client Secret. + +**3. STEP 3 - Deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Agari Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Agari API credentials from the previous step. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**4. Choose a deployment option** + +**5. Option 1: Deploy using the Azure Resource Manager (ARM) Template** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-agari-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3.
Enter the **Workspace ID**, **Workspace Key**, **Agari Client ID**, **Agari Client Secret**, select `True` or `False` for the products you subscribe to, and if you wish to share IoCs with Sentinel, select `True` for **Enable Security Graph Sharing**, and enter the required IDs from the Azure Application. +> - The Function App will request data from the Agari APIs every 5 minutes, corresponding to the Function App Timer. +> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. +6. **NOTE:** Due to the use of Environment Variables to store log access times, the App requires 1 additional manual step. In the Function App, select the Function App Name and click on **Identity** and for System assigned Identity, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**. + +**6. Option 2: Manual Deployment of Azure Functions** + +**1. Create a Function App** + +1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**. +2. In the **Basics** tab, ensure Runtime stack is set to **PowerShell Core**. +3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected. +4. Make other preferable configuration changes, if needed, then click **Create**. + +**2. Import Function App Code** + +1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**. +2.
Click on **Code + Test** on the left pane. +3. Copy the [Function App Code](https://aka.ms/sentinel-agari-functionapp) and paste into the Function App `run.ps1` editor. +3. Click **Save**. + +**3. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following eight to twelve (8-12) application settings individually, with their respective string values (case-sensitive): + clientID + clientSecret + workspaceID + workspaceKey + enableBrandProtectionAPI + enablePhishingResponseAPI + enablePhishingDefenseAPI + resGroup + functionName + subId + enableSecurityGraphSharing + <--- Required if enableSecurityGraphSharing is set to true ---> + GraphTenantId + GraphClientId + GraphClientSecret + logAnalyticsUri (optional) +> - Enter your Agari ClientID and Secret in 'clientId' and 'clientSecret' +> - Enter 'true' or 'false' for 'enablePhishingDefense', 'enableBrandProtection', 'enablePhishingResponse' as per your product subscriptions. +> - Enter your Resource Group name in resGroup, the name of the Function (from previous step) in functionName and your Subscription ID in subId. +> - Enter 'true' or 'false' for 'enableSecurtyGraphAPI'. If you are enabling the Security Graph, the 'GraphTenantId','GraphClientId', and 'GraphClientSecret' is required. +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. + +**4. Set Permissions for the App** + +1. In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, set the status to On. + +2. Next, click on **Azure role assignments** and **Add Role assignment**. 
Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**. + +**5. Complete Setup.** + +1. Once all application settings have been entered, click **Save**. Note that it will take some time for the required dependencies to download, so you may see some initial failure messages. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/aivectradetect.md b/Tools/Solutions Analyzer/connector-docs/connectors/aivectradetect.md index 163e12de7ca..376c312041b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/aivectradetect.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/aivectradetect.md @@ -10,4 +10,75 @@ The AI Vectra Detect connector allows users to connect Vectra Detect logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives users more insight into their organization's network and improves their security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.
+ +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 over TCP, UDP or TLS. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward AI Vectra Detect logs to Syslog agent in CEF format** + +Configure Vectra (X Series) Agent to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. + +From the Vectra UI, navigate to Settings > Notifications and Edit Syslog configuration. Follow below instructions to set up the connection: + +- Add a new Destination (which is the host where the Microsoft Sentinel Syslog Agent is running) + +- Set the Port as **514** + +- Set the Protocol as **UDP** + +- Set the format to **CEF** + +- Set Log types (Select all log types available) + +- Click on **Save** + +User can click the **Test** button to force send some test events. + + For more information, refer to Cognito Detect Syslog Guide which can be downloaded from the ressource page in Detect UI. + +**3. 
Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/aivectradetectama.md b/Tools/Solutions Analyzer/connector-docs/connectors/aivectradetectama.md index 27df9ab12db..36d3c38a07b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/aivectradetectama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/aivectradetectama.md @@ -10,4 +10,76 @@ The AI Vectra Detect connector allows users to connect Vectra Detect logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives users more insight into their organization's network and improves their security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward AI Vectra Detect logs to Syslog agent in CEF format** + + Configure Vectra (X Series) Agent to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. + +From the Vectra UI, navigate to Settings > Notifications and Edit Syslog configuration. 
Follow below instructions to set up the connection: + +- Add a new Destination (which is the host where the Microsoft Sentinel Syslog Agent is running) + +- Set the Port as **514** + +- Set the Protocol as **UDP** + +- Set the format to **CEF** + +- Set Log types (Select all log types available) + +- Click on **Save** + +User can click the **Test** button to force send some test events. + + For more information, refer to Cognito Detect Syslog Guide which can be downloaded from the ressource page in Detect UI. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/aivectrastream.md b/Tools/Solutions Analyzer/connector-docs/connectors/aivectrastream.md index 27cad856112..528b89b0c9a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/aivectrastream.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/aivectrastream.md @@ -10,4 +10,70 @@ The AI Vectra Stream connector allows you to send Network Metadata collected by Vectra Sensors across the Network and Cloud to Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Vectra AI Brain**: must be configured to export Stream metadata in JSON + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected **VectraStream** which is deployed with the Microsoft Sentinel Solution. + +**1. Install and onboard the agent for Linux** + +Install the Linux agent on a separate Linux instance. + +> Logs are collected only from **Linux** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**.
+ - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Follow the configuration steps below to get Vectra Stream metadata into Microsoft Sentinel. The Log Analytics agent is leveraged to send custom JSON into Azure Monitor, enabling the storage of the metadata into a custom table. For more information, refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json). +1. Download config file for the log analytics agent: VectraStream.conf (located in the Connector folder within the Vectra solution: https://aka.ms/sentinel-aivectrastream-conf). +2. Login to the server where you have installed Azure Log Analytics agent. +3. Copy VectraStream.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. +4. Edit VectraStream.conf as follows: + + i. configure an alternate port to send data to, if desired. Default port is 29009. + + ii. replace **workspace_id** with real value of your Workspace ID. +5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command: + sudo /opt/microsoft/omsagent/bin/service_control restart +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Configure and connect Vectra AI Stream** + +Configure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via the Log Analytics Agent. 
+ +From the Vectra UI, navigate to Settings > Cognito Stream and Edit the destination configuration: + +- Select Publisher: RAW JSON + +- Set the server IP or hostname (which is the host which run the Log Analytics Agent) + +- Set all the port to **29009** (this port can be modified if required) + +- Save + +- Set Log types (Select all log types available) + +- Click on **Save** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/akamaisecurityevents.md b/Tools/Solutions Analyzer/connector-docs/connectors/akamaisecurityevents.md index 14fb2380e76..83bbf704ba5 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/akamaisecurityevents.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/akamaisecurityevents.md @@ -10,4 +10,59 @@ Akamai Solution for Microsoft Sentinel provides the capability to ingest [Akamai Security Events](https://www.akamai.com/us/en/products/security/) into Microsoft Sentinel. Refer to [Akamai SIEM Integration documentation](https://developer.akamai.com/tools/integrations/siem) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Akamai Security Events and load the function code or click [here](https://aka.ms/sentinel-akamaisecurityevents-parser), on the second line of the query, enter the hostname(s) of your Akamai Security Events device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +[Follow these steps](https://developer.akamai.com/tools/integrations/siem) to configure Akamai CEF connector to send Syslog messages in CEF format to the proxy machine. 
Make sure you to send the logs to port 514 TCP on the machine's IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/akamaisecurityeventsama.md b/Tools/Solutions Analyzer/connector-docs/connectors/akamaisecurityeventsama.md index 9f98a109e1e..4d70ab7d7f7 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/akamaisecurityeventsama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/akamaisecurityeventsama.md @@ -10,4 +10,59 @@ Akamai Solution for Microsoft Sentinel provides the capability to ingest [Akamai Security Events](https://www.akamai.com/us/en/products/security/) into Microsoft Sentinel. Refer to [Akamai SIEM Integration documentation](https://developer.akamai.com/tools/integrations/siem) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Akamai Security Events and load the function code or click [here](https://aka.ms/sentinel-akamaisecurityevents-parser), on the second line of the query, enter the hostname(s) of your Akamai Security Events device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade. + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check whether an existing DCR is configured to collect the required facility of logs; if not, create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install at least version 1.27 of the AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. 
Forward Common Event Format (CEF) logs to Syslog agent** + + [Follow these steps](https://developer.akamai.com/tools/integrations/siem) to configure Akamai CEF connector to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/alicloudactiontrailccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/alicloudactiontrailccpdefinition.md index 5a9b0844a2b..5ccf21a0435 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/alicloudactiontrailccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/alicloudactiontrailccpdefinition.md @@ -10,4 +10,54 @@ The [Alibaba Cloud ActionTrail](https://www.alibabacloud.com/product/actiontrail) data connector provides the capability to retrieve ActionTrail events stored into [Alibaba Cloud Simple Log Service](https://www.alibabacloud.com/product/log-service) and store them into Microsoft Sentinel through the [SLS REST API](https://www.alibabacloud.com/help/sls/developer-reference/api-sls-2020-12-30-getlogs). The connector provides the ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **SLS REST API Credentials/permissions**: **AliCloudAccessKeyId** and **AliCloudAccessKeySecret** are required for making API calls. RAM policy statement with action of at least `log:GetLogStoreLogs` over resource `acs:log:{#regionId}:{#accountId}:project/{#ProjectName}/logstore/{#LogstoreName}` is needed to grant a RAM user the permissions to call this operation. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure access to AliCloud SLS API** + +Before using the API, you need to prepare your identity account and access key pair to effectively access the API. +1. We recommend that you use a Resource Access Management (RAM) user to call API operations. For more information, see [create a RAM user and authorize the RAM user to access Simple Log Service](https://www.alibabacloud.com/help/sls/create-a-ram-user-and-authorize-the-ram-user-to-access-log-service). +2. Obtain the access key pair for the RAM user. For details see [get Access Key pair](https://www.alibabacloud.com/help/ram/user-guide/create-an-accesskey-pair). + +Note the access key pair details for the next step. + +**2. Add ActionTrail Logstore** + +To enable the Alibaba Cloud ActionTrail connector for Microsoft Sentinel, click upon add ActionTrail Logstore, fill the form with the Alibaba Cloud environment configuration and click Connect. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **AliCloud SLS Logstore Endpoint URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add ActionTrail Logstore** + +*Add SLS Logstore linked to Alibaba Cloud ActionTrail* + +When you click the "Add Logstore" button in the portal, a configuration form will open. 
You'll need to provide: + +- **Alibaba Cloud SLS Public Endpoint** (optional): .log.aliyuncs.com +- **Project** (optional): +- **Logstore** (optional): +- **Access Key ID** (optional): Access Key ID +- **Access Key Secret** (optional): Access Key Secret + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/alsidforad.md b/Tools/Solutions Analyzer/connector-docs/connectors/alsidforad.md index a5006e81e11..bcbb4ea5efb 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/alsidforad.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/alsidforad.md @@ -12,4 +12,61 @@ Alsid for Active Directory connector allows to export Alsid Indicators of Exposu It provides a data parser to manipulate the logs more easily. The different workbooks ease your Active Directory monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-alsidforad-parser) to create the Kusto Functions alias, **afad_parser** + +**1. 
Configure the Syslog server** + +You will first need a **Linux Syslog** server that Alsid for AD will send logs to. Typically you can run **rsyslog** on **Ubuntu**. + You can then configure this server as you wish, but it is recommended to be able to output AFAD logs in a separate file. +Alternatively you can use [this Quickstart template](https://azure.microsoft.com/resources/templates/alsid-syslog-proxy/) which will deploy the Syslog server and the Microsoft agent for you. If you do use this template, you can skip step 3. + +**2. Configure Alsid to send logs to your Syslog server** + +On your **Alsid for AD** portal, go to *System*, *Configuration* and then *Syslog*. +From there you can create a new Syslog alert toward your Syslog server. + +Once this is done, check that the logs are correctly gathered on your server in a separate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in AFAD). +If you used the Quickstart template, the Syslog server will by default listen on port 514 in UDP and 1514 in TCP, without TLS. + +**3. Install and onboard the Microsoft agent for Linux** + +You can skip this step if you used the Quickstart template in step 1. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**4. Configure the logs to be collected by the agents** + +Configure the agent to collect the logs. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Custom Logs**. +2. Select **Apply below configuration to my machines** and click **Add**. +3. 
Upload a sample AFAD Syslog file from the **Linux** machine running the **Syslog** server and click **Next**, for your convenience, you can find such a file [here](https://github.com/Azure/azure-quickstart-templates/blob/master/alsid-syslog-proxy/logs/AlsidForAD.log). +4. Set the record delimiter to **New Line** if not already the case and click **Next**. +5. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**. If you used the Quickstart template in step 1, the default location of the file is `/var/log/AlsidForAD.log`. +6. Set the **Name** to *AlsidForADLog_CL* then click **Done** (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *AlsidForADLog_CL_CL*). + +All of these steps are showcased [here](https://www.youtube.com/watch?v=JwV1uZSyXM4&feature=youtu.be) as an example +- **Open Syslog settings** + +> You should now be able to receive logs in the *AlsidForADLog_CL* table, logs data can be parse using the **afad_parser()** function, used by all query samples, workbooks and analytic templates. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/anvilogicccfdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/anvilogicccfdefinition.md index 466a0d5c220..e4d90d69708 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/anvilogicccfdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/anvilogicccfdefinition.md @@ -10,4 +10,27 @@ The Anvilogic data connector allows you to pull events of interest generated in the Anvilogic ADX cluster into your Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+ +**Custom Permissions:** +- **Anvilogic Application Registration Client ID and Client Secret**: To access the Anvilogic ADX we require the client id and client secret from the Anvilogic app registration + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect to Anvilogic to start collecting events of interest in Microsoft Sentinel** + +Complete the form to ingest Anvilogic Alerts into your Microsoft Sentinel +- **Token Endpoint**: https://login[.]microsoftonline[.]com//oauth2/v2.0/token +- **Anvilogic ADX Scope**: /.default +- **Anvilogic ADX Request URI**: /v2/rest/query +- **OAuth Configuration**: + - Client ID + - Client Secret + - Click 'Connect' to authenticate + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/apachehttpserver.md b/Tools/Solutions Analyzer/connector-docs/connectors/apachehttpserver.md index 86ce6ad1d76..042886ef581 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/apachehttpserver.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/apachehttpserver.md @@ -10,4 +10,60 @@ The Apache HTTP Server data connector provides the capability to ingest [Apache HTTP Server](http://httpd.apache.org/) events into Microsoft Sentinel. Refer to [Apache Logs documentation](https://httpd.apache.org/docs/2.4/logs.html) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ApacheHTTPServer and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ApacheHTTPServer/Parsers/ApacheHTTPServer.txt). The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Apache HTTP Server where the logs are generated. + +> Logs from Apache HTTP Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. 
Select the link above to open your workspace advanced settings +2. From the left pane, select **Data**, select **Custom Logs** and click **Add+** +3. Click **Browse** to upload a sample of a Apache HTTP Server log file (e.g. access.log or error.log). Then, click **Next >** +4. Select **New line** as the record delimiter and click **Next >** +5. Select **Windows** or **Linux** and enter the path to Apache HTTP logs based on your configuration. Example: + - **Windows** directory: `C:\Server\bin\Apache24\logs\*.log` + - **Linux** Directory: `/var/log/httpd/*.log` +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **ApacheHTTPServer_CL** as the custom log Name and click **Done** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/apachetomcat.md b/Tools/Solutions Analyzer/connector-docs/connectors/apachetomcat.md index f03597af901..bca530abcea 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/apachetomcat.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/apachetomcat.md @@ -10,4 +10,61 @@ The Apache Tomcat solution provides the capability to ingest [Apache Tomcat](http://tomcat.apache.org/) events into Microsoft Sentinel. Refer to [Apache Tomcat documentation](http://tomcat.apache.org/tomcat-10.0-doc/logging.html) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias TomcatEvent and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tomcat/Parsers/TomcatEvent.txt).The function usually takes 10-15 minutes to activate after solution installation/update. + +>**NOTE:** This data connector has been developed using Apache Tomcat version 10.0.4 + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Apache Tomcat Server where the logs are generated. + +> Logs from Apache Tomcat Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. 
From the left pane, select **Data**, select **Custom Logs** and click **Add+** +3. Click **Browse** to upload a sample of a Tomcat log file (e.g. access.log or error.log). Then, click **Next >** +4. Select **New line** as the record delimiter and click **Next >** +5. Select **Windows** or **Linux** and enter the path to Tomcat logs based on your configuration. Example: + - **Linux** Directory: '/var/log/tomcat/*.log' +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **Tomcat_CL** as the custom log Name and click **Done** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/apigeexdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/apigeexdataconnector.md index 81b8964923b..7070fd02950 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/apigeexdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/apigeexdataconnector.md @@ -14,4 +14,96 @@ The [Google ApigeeX](https://cloud.google.com/apigee/docs) data connector provid

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **GCP service account**: GCP service account with permissions to read logs is required for GCP Logging API. Also json file with service account key is required. See the documentation to learn more about [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions), [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ApigeeX**](https://aka.ms/sentinel-ApigeeXDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuring GCP and obtaining credentials** + +1. Make sure that Logging API is [enabled](https://cloud.google.com/apis/docs/getting-started#enabling_apis). + +2. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions) and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). + +3. Prepare GCP project ID where ApigeeX is located. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ApigeeXDataConnector-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. 
Enter the **Google Cloud Platform Project Id**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-ApigeeXDataConnector-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. 
For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_NAMES + CREDENTIALS_FILE_CONTENT + WORKSPACE_ID + SHARED_KEY + logAnalyticsUri (Optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/argoscloudsecurity.md b/Tools/Solutions Analyzer/connector-docs/connectors/argoscloudsecurity.md index cf86d8d43e5..4bc12784dc6 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/argoscloudsecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/argoscloudsecurity.md @@ -10,4 +10,36 @@ The ARGOS Cloud Security integration for Microsoft Sentinel allows you to have all your important cloud security events in one place. This enables you to easily create dashboards, alerts, and correlate events across multiple systems. Overall this will improve your organization's security posture and security incident response. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Subscribe to ARGOS** + +Ensure you already own an ARGOS Subscription. If not, browse to [ARGOS Cloud Security](https://argos-security.io) and sign up to ARGOS. + +Alternatively, you can also purchase ARGOS via the [Azure Marketplace](https://azuremarketplace.microsoft.com/en-au/marketplace/apps/argoscloudsecurity1605618416175.argoscloudsecurity?tab=Overview). + +**2. Configure Sentinel integration from ARGOS** + +Configure ARGOS to forward any new detections to your Sentinel workspace by providing ARGOS with your Workspace ID and Primary Key. + +There is **no need to deploy any custom infrastructure**. + +Enter the information into the [ARGOS Sentinel](https://app.argos-security.io/account/sentinel) configuration page. + +New detections will automatically be forwarded. 
+ +[Learn more about the integration](https://www.argos-security.io/resources#integrations) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/aristaawakesecurity.md b/Tools/Solutions Analyzer/connector-docs/connectors/aristaawakesecurity.md index e44731c6c13..030ac5d9221 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/aristaawakesecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/aristaawakesecurity.md @@ -10,4 +10,70 @@ The Awake Security CEF connector allows users to send detection model matches from the Awake Security Platform to Microsoft Sentinel. Remediate threats quickly with the power of network detection and response and speed up investigations with deep visibility especially into unmanaged entities including users, devices and applications on your network. The connector also enables the creation of network security-focused custom alerts, incidents, workbooks and notebooks that align with your existing security operations workflows. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Awake Adversarial Model match results to a CEF collector.** + +Perform the following steps to forward Awake Adversarial Model match results to a CEF collector listening on TCP port **514** at IP **192.168.0.1**: +- Navigate to the Detection Management Skills page in the Awake UI. +- Click + Add New Skill. +- Set the Expression field to, +>integrations.cef.tcp { destination: "192.168.0.1", port: 514, secure: false, severity: Warning } +- Set the Title field to a descriptive name like, +>Forward Awake Adversarial Model match result to Microsoft Sentinel. +- Set the Reference Identifier to something easily discoverable like, +>integrations.cef.sentinel-forwarder +- Click Save. 
+ +Note: Within a few minutes of saving the definition and other fields the system will begin sending new model match results to the CEF events collector as they are detected. + +For more information, refer to the **Adding a Security Information and Event Management Push Integration** page from the Help Documentation in the Awake UI. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/armisactivities.md b/Tools/Solutions Analyzer/connector-docs/connectors/armisactivities.md index 2865156962b..abbaeca7fe5 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/armisactivities.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/armisactivities.md @@ -10,4 +10,108 @@ The [Armis](https://www.armis.com/) Activities connector gives the capability to ingest Armis device Activities into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/doc` for more information. 
The connector provides the ability to get device activity information from the Armis platform. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. Armis detects what all devices are doing in your environment and classifies those activities to get a complete picture of device behavior. These activities are analyzed for an understanding of normal and abnormal device behavior and used to assess device and network risk. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Armis Secret Key** is required. See the documentation to learn more about API on the `https://.armis.com/api/v1/doc` + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArmisActivities and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Parsers/ArmisActivities.yaml). The function usually takes 10-15 minutes to activate after solution installation/update. + +**STEP 1 - Configuration steps for the Armis API** + + Follow these instructions to create an Armis API secret key. + 1. Log into your Armis instance + 2. Navigate to Settings -> API Management + 3. If the secret key has not already been created, press the Create button to create the secret key + 4. To access the secret key, press the Show button + 5. The secret key can now be copied and used during the Armis Activities connector configuration + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Armis Activities data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Armis API Authorization Key(s) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**4. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Armis connector. + +1. 
Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisActivitiesAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisActivitiesAPI-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Function Name + Workspace ID + Workspace Key + Armis Secret Key + Armis URL (https://.armis.com/api/v1/) + Armis Activity Table Name + Armis Schedule + Avoid Duplicates (Default: false) +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**5. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Armis Activity data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisActivitiesAPI-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. 
Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX). + + e. **Select a runtime:** Choose Python 3.11 + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective values (case-sensitive): + Workspace ID + Workspace Key + Armis Secret Key + Armis URL (https://.armis.com/api/v1/) + Armis Activity Table Name + Armis Schedule + Avoid Duplicates (Default: false) + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/armisalerts.md b/Tools/Solutions Analyzer/connector-docs/connectors/armisalerts.md index 91810127481..ee100e265e4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/armisalerts.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/armisalerts.md @@ -10,4 +10,108 @@ The [Armis](https://www.armis.com/) Alerts connector gives the capability to ingest Armis Alerts into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get alert information from the Armis platform and to identify and prioritize threats in your environment. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Armis Secret Key** is required. See the documentation to learn more about API on the `https://.armis.com/api/v1/doc` + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-ArmisAlertsAPI-parser) to create the Kusto functions alias, **ArmisAlerts** + +**STEP 1 - Configuration steps for the Armis API** + + Follow these instructions to create an Armis API secret key. + 1. Log into your Armis instance + 2. Navigate to Settings -> API Management + 3. If the secret key has not already been created, press the Create button to create the secret key + 4. To access the secret key, press the Show button + 5. The secret key can now be copied and used during the Armis Alerts connector configuration + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Armis Alert data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Armis API Authorization Key(s) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. 
Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Armis connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisAlertsAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisAlertsAPI-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Function Name + Workspace ID + Workspace Key + Armis Secret Key + Armis URL (https://.armis.com/api/v1/) + Armis Alert Table Name + Armis Schedule + Avoid Duplicates (Default: true) +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Armis Alert data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisAlertsAPI-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. 
**Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX). + + e. **Select a runtime:** Choose Python 3.11 + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective values (case-sensitive): + Workspace ID + Workspace Key + Armis Secret Key + Armis URL (https://.armis.com/api/v1/) + Armis Alert Table Name + Armis Schedule + Avoid Duplicates (Default: true) + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/armisalertsactivities.md b/Tools/Solutions Analyzer/connector-docs/connectors/armisalertsactivities.md index c2e3e80b6d9..dd1ffa8f57b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/armisalertsactivities.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/armisalertsactivities.md @@ -10,4 +10,172 @@ The [Armis](https://www.armis.com/) Alerts Activities connector gives the capability to ingest Armis Alerts and Activities into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get alert and activity information from the Armis platform and to identify and prioritize threats in your environment. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Armis Secret Key** is required. See the documentation to learn more about API on the `https://.armis.com/api/v1/doc` + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArmisActivities/ArmisAlerts and load the function code. The function usually takes 10-15 minutes to activate after solution installation/update. + +**STEP 1 - Configuration steps for the Armis API** + + Follow these instructions to create an Armis API secret key. + 1. Log into your Armis instance + 2. Navigate to Settings -> API Management + 3. If the secret key has not already been created, press the Create button to create the secret key + 4. To access the secret key, press the Show button + 5. The secret key can now be copied and used during the Armis Alerts Activities connector configuration + +**STEP 2 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. 
Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Armis Alerts Activities Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 3 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of Armis Alerts Activities Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Armis Alerts Activities Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 4 - Assign role of Contributor to application in Microsoft Entra ID** + + Follow the steps in this section to assign the role: + 1. In the Azure portal, Go to **Resource Group** and select your resource group. + 2. 
Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. Select **Contributor** as role and click on next. + 5. In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 5 - Create a Keyvault** + + Follow these instructions to create a new Keyvault. + 1. In the Azure portal, Go to **Key vaults**. Click create. + 2. Select Subscription, Resource Group and provide unique name of keyvault. + +> **NOTE:** Create a separate key vault for each **API key** within one workspace. + +**STEP 6 - Create Access Policy in Keyvault** + + Follow these instructions to create access policy in Keyvault. + 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel. Click create. + 2. Select all keys & secrets permissions. Click next. + 3. In the principal section, search by application name which was generated in STEP - 2. Click next.
+ +> **NOTE:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'** + +**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Armis Alerts Activities data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Armis API Authorization Key(s) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**8. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Armis connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Function Name + Workspace ID + Workspace Key + Armis Secret Key + Armis URL (https://.armis.com/api/v1/) + Armis Alert Table Name + Armis Activity Table Name + Severity (Default: Low) + Armis Schedule + KeyVault Name + Azure Client Id + Azure Client Secret + Tenant Id +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**9. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Armis Alerts Activities data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. 
Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI311-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX). + + e. **Select a runtime:** Choose Python 3.11 + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. 
Add each of the following application settings individually, with their respective values (case-sensitive): + Workspace ID + Workspace Key + Armis Secret Key + Armis URL (https://.armis.com/api/v1/) + Armis Alert Table Name + Armis Activity Table Name + Severity (Default: Low) + Armis Schedule + KeyVault Name + Azure Client Id + Azure Client Secret + Tenant Id + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/armisdevices.md b/Tools/Solutions Analyzer/connector-docs/connectors/armisdevices.md index 135ac9d6960..15448c899da 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/armisdevices.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/armisdevices.md @@ -10,4 +10,168 @@ The [Armis](https://www.armis.com/) Device connector gives the capability to ingest Armis Devices into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get device information from the Armis platform. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. Armis can also integrate with your existing IT & security management tools to identify and classify each and every device, managed or unmanaged in your environment. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Armis Secret Key** is required. See the documentation to learn more about API on the `https://.armis.com/api/v1/doc` + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-ArmisDevice-parser) to create the Kusto functions alias, **ArmisDevice** + +**STEP 1 - Configuration steps for the Armis API** + + Follow these instructions to create an Armis API secret key. + 1. Log into your Armis instance + 2. Navigate to Settings -> API Management + 3. If the secret key has not already been created, press the Create button to create the secret key + 4. 
To access the secret key, press the Show button + 5. The secret key can now be copied and used during the Armis Device connector configuration + +**STEP 2 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Armis Device Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 3 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of Armis Device Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Armis Device Data Connector. 
+ +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 4 - Assign role of Contributor to application in Microsoft Entra ID** + + Follow the steps in this section to assign the role: + 1. In the Azure portal, Go to **Resource Group** and select your resource group. + 2. Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. Select **Contributor** as role and click on next. + 5. In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 5 - Create a Keyvault** + + Follow these instructions to create a new Keyvault. + 1. In the Azure portal, Go to **Key vaults**. Click create. + 2. Select Subscription, Resource Group and provide unique name of keyvault. + +> **NOTE:** Create a separate key vault for each **API key** within one workspace. + +**STEP 6 - Create Access Policy in Keyvault** + + Follow these instructions to create access policy in Keyvault. + 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel. Click create. + 2. Select all keys & secrets permissions. Click next. + 3. In the principal section, search by application name which was generated in STEP - 2. Click next.
+ +> **NOTE:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'** + +**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Armis Device data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Armis API Authorization Key(s) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**8. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Armis connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisDevice-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisDevice-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Function Name + Workspace ID + Workspace Key + Armis Secret Key + Armis URL (https://.armis.com/api/v1/) + Armis Device Table Name + Armis Schedule + KeyVault Name + Azure Client Id + Azure Client Secret + Tenant Id +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**9. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Armis Device data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. 
Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisDevice311-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX). + + e. **Select a runtime:** Choose Python 3.11 + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. 
Add each of the following application settings individually, with their respective values (case-sensitive): + Workspace ID + Workspace Key + Armis Secret Key + Armis URL (https://<armis-instance>.armis.com/api/v1/) + Armis Device Table Name + Armis Schedule + KeyVault Name + Azure Client Id + Azure Client Secret + Tenant Id + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://<CustomerId>.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/armorblox.md b/Tools/Solutions Analyzer/connector-docs/connectors/armorblox.md index 959291373c6..cad9c14b321 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/armorblox.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/armorblox.md @@ -10,4 +10,100 @@ The [Armorblox](https://www.armorblox.com/) data connector provides the capability to ingest incidents from your Armorblox instance into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).
+- **Armorblox Instance Details**: **ArmorbloxInstanceName** OR **ArmorbloxInstanceURL** is required +- **Armorblox API Credentials**: **ArmorbloxAPIToken** is required + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Armorblox API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Armorblox API** + + Follow the instructions to obtain the API token. + +1. Log in to the Armorblox portal with your credentials. +2. In the portal, click **Settings**. +3. In the **Settings** view, click **API Keys** +4. Click **Create API Key**. +5. Enter the required information. +6. Click **Create**, and copy the API token displayed in the modal. +7. Save API token for using in the data connector. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Armorblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Armorblox data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-armorblox-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **ArmorbloxAPIToken**, **ArmorbloxInstanceURL** OR **ArmorbloxInstanceName**, and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Armorblox data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-armorblox-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. 
+If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. Armorblox). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select ** New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + ArmorbloxAPIToken + ArmorbloxInstanceName OR ArmorbloxInstanceURL + WorkspaceID + WorkspaceKey + LogAnalyticsUri (optional) +> - Use LogAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/arubaclearpass.md b/Tools/Solutions Analyzer/connector-docs/connectors/arubaclearpass.md index 037b6c51cfd..358b097dfc7 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/arubaclearpass.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/arubaclearpass.md @@ -10,4 +10,61 @@ The [Aruba ClearPass](https://www.arubanetworks.com/products/security/network-access-control/secure-access/) connector allows you to easily connect your Aruba ClearPass with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArubaClearPass and load the function code or click [here](https://aka.ms/sentinel-arubaclearpass-parser).The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. 
Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Aruba ClearPass logs to a Syslog agent** + +Configure Aruba ClearPass to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. +1. [Follow these instructions](https://www.arubanetworks.com/techdocs/ClearPass/6.7/PolicyManager/Content/CPPM_UserGuide/Admin/syslogExportFilters_add_syslog_filter_general.htm) to configure the Aruba ClearPass to forward syslog. +2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema.
+ +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/arubaclearpassama.md b/Tools/Solutions Analyzer/connector-docs/connectors/arubaclearpassama.md index f4713502ac5..b88cee0f76d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/arubaclearpassama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/arubaclearpassama.md @@ -10,4 +10,61 @@ The [Aruba ClearPass](https://www.arubanetworks.com/products/security/network-access-control/secure-access/) connector allows you to easily connect your Aruba ClearPass with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArubaClearPass and load the function code or click [here](https://aka.ms/sentinel-arubaclearpass-parser).The function usually takes 10-15 minutes to activate after solution installation/update. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Aruba ClearPass logs to a Syslog agent** + + Configure Aruba ClearPass to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. +1. 
[Follow these instructions](https://www.arubanetworks.com/techdocs/ClearPass/6.7/PolicyManager/Content/CPPM_UserGuide/Admin/syslogExportFilters_add_syslog_filter_general.htm) to configure the Aruba ClearPass to forward syslog. +2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/asimdnsactivitylogs.md b/Tools/Solutions Analyzer/connector-docs/connectors/asimdnsactivitylogs.md index 4692937c5eb..09453946677 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/asimdnsactivitylogs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/asimdnsactivitylogs.md @@ -36,4 +36,17 @@ Windows DNS events are supported by Advanced SIEM Information Model (ASIM) and s For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2225993&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace data sources** (Workspace): read and write permissions. + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `ASimDnsActivityLogs`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/atlassianbeaconalerts.md b/Tools/Solutions Analyzer/connector-docs/connectors/atlassianbeaconalerts.md index ea9fd597161..6612b581400 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/atlassianbeaconalerts.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/atlassianbeaconalerts.md @@ -10,4 +10,52 @@ Atlassian Beacon is a cloud product that is built for Intelligent threat detection across the Atlassian platforms (Jira, Confluence, and Atlassian Admin). This can help users detect, investigate and respond to risky user activity for the Atlassian suite of products. The solution is a custom data connector from DEFEND Ltd. that is used to visualize the alerts ingested from Atlassian Beacon to Microsoft Sentinel via a Logic App. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Microsoft Sentinel** + +>1. Navigate to the newly installed Logic App 'Atlassian Beacon Integration' + +>2. Navigate to 'Logic app designer' + +>3. Expand the 'When a HTTP request is received' + +>4. Copy the 'HTTP POST URL' + +**2. Atlassian Beacon** + +>1. Login to Atlassian Beacon using an admin account + +>2. Navigate to 'SIEM forwarding' under SETTINGS + +> 3. Paste the copied URL from Logic App in the text box + +> 4. Click the 'Save' button + +**3. Testing and Validation** + +>1. Login to Atlassian Beacon using an admin account + +>2. Navigate to 'SIEM forwarding' under SETTINGS + +> 3. Click the 'Test' button right next to the newly configured webhook + +> 4. Navigate to Microsoft Sentinel + +> 5. Navigate to the newly installed Logic App + +> 6. Check for the Logic App Run under 'Runs history' + +> 7. Check for logs under the table name 'atlassian_beacon_alerts_CL' in 'Logs' + +> 8. If the analytic rule has been enabled, the above Test alert should have created an incident in Microsoft Sentinel + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/atlassianconfluence.md b/Tools/Solutions Analyzer/connector-docs/connectors/atlassianconfluence.md index 7fb403eab27..44693d22eb3 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/atlassianconfluence.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/atlassianconfluence.md @@ -10,4 +10,22 @@ The Atlassian Confluence data connector provides the capability to ingest [Atlassian Confluence audit logs](https://developer.atlassian.com/cloud/confluence/rest/api-group-audit/) into Microsoft Sentinel. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Atlassian Confluence API credentials**: Confluence Username and Confluence Access Token are required. [See the documentation to learn more about Atlassian Confluence API](https://developer.atlassian.com/cloud/confluence/rest/intro/). Confluence domain must be provided as well. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Atlassian Confluence** + +Please insert your credentials +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `BasicAuth`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/auth0.md b/Tools/Solutions Analyzer/connector-docs/connectors/auth0.md index 86ed20123fe..779f92c9ea8 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/auth0.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/auth0.md @@ -10,4 +10,96 @@ The [Auth0 Access Management](https://auth0.com/access-management) data connector provides the capability to ingest [Auth0 log events](https://auth0.com/docs/api/management/v2/#!/Logs/get_logs) into Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **API token** is required. [See the documentation to learn more about API token](https://auth0.com/docs/secure/tokens/access-tokens/get-management-api-access-tokens-for-production) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Auth0 Management APIs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Auth0 Management API** + + Follow the instructions to obtain the credentials. + +1. In Auth0 Dashboard, go to **Applications > Applications**. +2. Select your Application. This should be a "Machine-to-Machine" Application configured with at least **read:logs** and **read:logs_users** permissions. +3. 
Copy **Domain, ClientID, Client Secret** + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Auth0 Access Management data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Auth0 Access Management data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Auth0AccessManagement-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **Domain, ClientID, Client Secret**, **AzureSentinelWorkspaceId**, **AzureSentinelSharedKey**. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Auth0 Access Management data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. 
Download the [Azure Function App](https://aka.ms/sentinel-Auth0AccessManagement-azuredeploy) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. Auth0AMXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **New application setting**. +3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): + DOMAIN + CLIENT_ID + CLIENT_SECRET + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/auth0connectorccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/auth0connectorccpdefinition.md index 34e1082c982..5a11c8b25ef 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/auth0connectorccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/auth0connectorccpdefinition.md @@ -10,4 +10,23 @@ The [Auth0](https://auth0.com/docs/api/management/v2/logs/get-logs) data connector allows ingesting logs from Auth0 API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses Auth0 API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +#### STEP 1 - Configuration steps for the Auth0 Management API +Follow the instructions to obtain the credentials. + 1. In Auth0 Dashboard, go to [**Applications > Applications**] + 2. Select your Application. This should be a [**Machine-to-Machine**] Application configured with at least [**read:logs**] and [**read:logs_users**] permissions. + 3. Copy [**Domain, ClientID, Client Secret**] +- **Base API URL**: https://example.auth0.com +- **Client ID**: Client ID +- **Client Secret**: (password field) +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/authomize.md b/Tools/Solutions Analyzer/connector-docs/connectors/authomize.md index b8622c3e94c..ff10b6f7293 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/authomize.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/authomize.md @@ -10,4 +10,29 @@ The Authomize Data Connector provides the capability to ingest custom log types from Authomize into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Include custom pre-requisites if the connectivity requires - else delete customs**: Description for any custom pre-requisite + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Locate your Authomize API key** + +Follow the setup instructions [located under Data Connectors for Authomize](https://github.com/authomize/Open-ITDR/blob/main/Open-Connectors/Platform/Azure-Sentinel/Data%20Connectors/readme.md). + +**2. Deploy the Authomize data connector using the setup instructions.** + +Follow the Instructions on [deploying the data connector to ingest data from Authomize](https://github.com/authomize/Open-ITDR/blob/main/Open-Connectors/Platform/Azure-Sentinel/Data%20Connectors/readme.md). + +**3. Finalize your setup** + +Validate that your script is running. Simple instructions are located under the [Authomize Data Connector area](https://github.com/authomize/Open-ITDR/blob/main/Open-Connectors/Platform/Azure-Sentinel/Data%20Connectors/readme.md). + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/automatedlogicwebctrl.md b/Tools/Solutions Analyzer/connector-docs/connectors/automatedlogicwebctrl.md index 96c320d3d89..aaa40db38f1 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/automatedlogicwebctrl.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/automatedlogicwebctrl.md @@ -10,4 +10,49 @@ You can stream the audit logs from the WebCTRL SQL server hosted on Windows machines connected to your Microsoft Sentinel. This connection enables you to view dashboards, create custom alerts and improve investigation. This gives insights into your Industrial Control Systems that are monitored or controlled by the WebCTRL BAS application. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Install and onboard the Microsoft agent for Windows.** + +Learn about [agent setup](https://docs.microsoft.com/services-hub/health/mma-setup) and [windows events onboarding](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-windows-events). + + You can skip this step if you have already installed the Microsoft agent for Windows + +**2. Configure Windows task to read the audit data and write it to windows events** + +Install and configure the Windows Scheduled Task to read the audit logs in SQL and write them as Windows Events. These Windows Events will be collected by the agent and forward to Microsoft Sentinel. + +> Notice that the data from all machines will be stored in the selected workspace +2.1 Copy the [setup files](https://aka.ms/sentinel-automatedlogicwebctrl-tasksetup) to a location on the server. + + 2.2 Update the [ALC-WebCTRL-AuditPull.ps1](https://aka.ms/sentinel-automatedlogicwebctrl-auditpull) (copied in above step) script parameters like the target database name and windows event id's. Refer comments in the script for more details. + + 2.3 Update the windows task settings in the [ALC-WebCTRL-AuditPullTaskConfig.xml](https://aka.ms/sentinel-automatedlogicwebctrl-auditpulltaskconfig) file that was copied in above step as per requirement. Refer comments in the file for more details. + + 2.4 Install windows tasks using the updated configs copied in the above steps + - **Run the following command in powershell from the directory where the setup files are copied in step 2.1**: `schtasks.exe /create /XML "ALC-WebCTRL-AuditPullTaskConfig.xml" /tn "ALC-WebCTRL-AuditPull"` +**3. 
Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the Event schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, validate below steps for any run time issues: + +> 1. Make sure that the scheduled task is created and is in running state in the Windows Task Scheduler. + +>2. Check for task execution errors in the history tab in Windows Task Scheduler for the newly created task in step 2.4 + +>3. Make sure that the SQL Audit table consists new records while the scheduled windows task runs. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/aws.md b/Tools/Solutions Analyzer/connector-docs/connectors/aws.md index 1949830704c..375a0d86352 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/aws.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/aws.md @@ -10,4 +10,26 @@ Follow these instructions to connect to AWS and stream your CloudTrail logs into Microsoft Sentinel. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect AWS cloud trail with Microsoft Sentinel​** + +The connection necessitates giving Microsoft permissions to access your AWS account. 
To enable this, follow the instructions under [Connect AWS to Microsoft Sentinel](https://aka.ms/AWSConnector) and use these parameters when prompted: + +> Data from all regions will be sent to and stored in the workspace's region. + +> It takes about 5 minutes until the connection streams data to your workspace. +- **Microsoft account ID**: `MicrosoftAwsAccount` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **External ID (Workspace ID)**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `AwsCloudTrail`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/awscloudfrontccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/awscloudfrontccpdefinition.md index 0bd7ad2a9c5..0b0d194a412 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/awscloudfrontccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/awscloudfrontccpdefinition.md @@ -10,4 +10,61 @@ This data connector enables the integration of AWS CloudFront logs with Microsoft Sentinel to support advanced threat detection, investigation, and security monitoring. By utilizing Amazon S3 for log storage and Amazon SQS for message queuing, the connector reliably ingests CloudFront access logs into Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Ingesting AWS CloudFront logs in Microsoft Sentinel** + +### List of Resources Required: + +* Open ID Connect (OIDC) web identity provider +* IAM Role +* Amazon S3 Bucket +* Amazon SQS +* AWS CloudFront configuration +#### 1. AWS CloudFormation Deployment + To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace. + #### For each template, create Stack in AWS: + 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). + 2. Choose the ‘**Specify template**’ option, then ‘**Upload a template file**’ by clicking on ‘**Choose file**’ and selecting the appropriate CloudFormation template file provided below. click ‘**Choose file**’ and select the downloaded template. + 3. Click '**Next**' and '**Create stack**'. +- **Template 1: OpenID connect authentication deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Template 2: AWSCloudFront resources deployment**: `AWSCloudFront` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Connect new collectors + To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Role ARN** +- **Queue URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). 
+ +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add new controller** + +*AWS S3 connector* + +When you click the "Add new collector" button in the portal, a configuration form will open. You'll need to provide: + +*Account details* + +- **Role ARN** (required) +- **Queue URL** (required) + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/awsnetworkfirewallccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/awsnetworkfirewallccpdefinition.md index 787602022b3..7c08e9efb78 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/awsnetworkfirewallccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/awsnetworkfirewallccpdefinition.md @@ -10,4 +10,67 @@ This data connector allows you to ingest AWS Network Firewall logs into Microsoft Sentinel for advanced threat detection and security monitoring. By leveraging Amazon S3 and Amazon SQS, the connector forwards network traffic logs, intrusion detection alerts, and firewall events to Microsoft Sentinel, enabling real-time analysis and correlation with other security data +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Ingesting AWS NetworkFirewall logs in Microsoft Sentinel** + +### List of Resources Required: + +* Open ID Connect (OIDC) web identity provider +* IAM Role +* Amazon S3 Bucket +* Amazon SQS +* AWSNetworkFirewall configuration +* Follow this instructions for [AWS NetworkFirewall Data connector](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/readme.md) configuration +#### 1. AWS CloudFormation Deployment + To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace. + #### For each template, create Stack in AWS: + 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). + 2. Choose the ‘**Specify template**’ option, then ‘**Upload a template file**’ by clicking on ‘**Choose file**’ and selecting the appropriate CloudFormation template file provided below. click ‘**Choose file**’ and select the downloaded template. + 3. Click '**Next**' and '**Create stack**'. +- **Template 1: OpenID connect authentication deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Template 2: AWSNetworkFirewall resources deployment**: `AWSNetworkFirewall` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Connect new collectors + To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. 
+ +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Role ARN** +- **Queue URL** +- **Stream name** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add new controller** + +*AWS S3 connector* + +When you click the "Add new collector" button in the portal, a configuration form will open. You'll need to provide: + +*Account details* + +- **Role ARN** (required) +- **Queue URL** (required) +- **Data type** (required): Select from available options + - Alert Log + - Flow Log + - Tls Log + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/awsroute53resolverccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/awsroute53resolverccpdefinition.md index 6d4f622c85f..a2b4f51e1c9 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/awsroute53resolverccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/awsroute53resolverccpdefinition.md @@ -10,4 +10,108 @@ This connector enables ingestion of AWS Route 53 DNS logs into Microsoft Sentinel for enhanced visibility and threat detection. It supports DNS Resolver query logs ingested directly from AWS S3 buckets, while Public DNS query logs and Route 53 audit logs can be ingested using Microsoft Sentinel's AWS CloudWatch and CloudTrail connectors. Comprehensive instructions are provided to guide you through the setup of each log type. 
Leverage this connector to monitor DNS activity, detect potential threats, and improve your security posture in cloud environments. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. AWS Route53** + +This connector enables the ingestion of AWS Route 53 DNS logs into Microsoft Sentinel, providing enhanced visibility into DNS activity and strengthening threat detection capabilities. It supports direct ingestion of DNS Resolver query logs from AWS S3 buckets, while Public DNS query logs and Route 53 audit logs can be ingested via Microsoft Sentinel’s AWS CloudWatch and CloudTrail connectors. Detailed setup instructions are provided for each log type. Use this connector to monitor DNS traffic, identify potential threats, and enhance your cloud security posture. + +You can ingest the following type of logs from AWS Route 53 to Microsoft Sentinel: +1. Route 53 Resolver query logs +2. Route 53 Public Hosted zones query logs (via Microsoft Sentinel CloudWatch connector) +3. Route 53 audit logs (via Microsoft Sentinel CloudTrail connector) + +**Ingesting Route53 Resolver query logs in Microsoft Sentinel** + + ### List of Resources Required: + +* Open ID Connect (OIDC) web identity provider +* IAM Role +* Amazon S3 Bucket +* Amazon SQS +* Route 53 Resolver query logging configuration +* VPC to associate with Route53 Resolver query log config + #### 1. 
AWS CloudFormation Deployment + To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace. + #### For each template, create Stack in AWS: + 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). + 2. Choose the ‘**Specify template**’ option, then ‘**Upload a template file**’ by clicking on ‘**Choose file**’ and selecting the appropriate CloudFormation template file provided below. click ‘**Choose file**’ and select the downloaded template. + 3. Click '**Next**' and '**Create stack**'. + - **Template 1: OpenID connect authentication deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + - **Template 2: AWS Route53 resources deployment**: `AWSRoute53Resolver` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + ### 2. Connect new collectors + To enable Amazon Web Services S3 DNS Route53 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. + **Connector Management Interface** + + This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + + 📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: + - **Role ARN** + - **Queue URL** + + ➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + + 🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + + > 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. 
+ + **Add new controller** + + *AWS Security Hub connector* + + When you click the "Add new collector" button in the portal, a configuration form will open. You'll need to provide: + + *Account details* + + - **Role ARN** (required) + - **Queue URL** (required) + + > 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + + + **Ingesting Route 53 Public Hosted zones query logs (via Microsoft Sentinel CloudWatch connector)** + + Public Hosted zone query logs are exported to CloudWatch service in AWS. We can use 'Amazon Web Services S3' connector to ingest CloudWatch logs from AWS to Microsoft Sentinel. +**Step 1: Configure logging for Public DNS queries** + + 1. Sign in to the AWS Management Console and open the Route 53 console at [AWS Route 53](https://console.aws.amazon.com/route53/). +2. Navigate to Route 53 > Hosted zones. +3. Choose the Public hosted zone that you want to configure query logging for. +4. In the Hosted zone details pane, click "Configure query logging". +5. Choose an existing log group or create a new log group. +6. Choose Create. + + **Step 2: Configure Amazon Web Services S3 data connector for AWS CloudWatch** + + AWS CloudWatch logs can be exported to an S3 bucket using lambda function. To ingest Public DNS queries from `AWS CloudWatch` to `S3` bucket and then to Microsoft Sentinel, follow the instructions provided in the [Amazon Web Services S3 connector](https://learn.microsoft.com/en-us/azure/sentinel/connect-aws?tabs=s3). + + **Ingesting Route 53 audit logs (via Microsoft Sentinel CloudTrail connector)** + + Route 53 audit logs i.e. the logs related to actions taken by user, role or AWS service in Route 53 can be exported to an S3 bucket via AWS CloudTrail service. We can use 'Amazon Web Services S3' connector to ingest CloudTrail logs from AWS to Microsoft Sentinel. +**Step 1: Configure logging for AWS Route 53 Audit logs** + + 1. 
Sign in to the AWS Management Console and open the CloudTrail console at [AWS CloudTrail](https://console.aws.amazon.com/cloudtrail) +2. If you do not have an existing trail, click on 'Create trail' +3. Enter a name for your trail in the Trail name field. +4. Select Create new S3 bucket (you may also choose to use an existing S3 bucket). +5. Leave the other settings as default, and click Next. +6. Select Event type, make sure Management events is selected. +7. Select API activity, 'Read' and 'Write' +8. Click Next. +9. Review the settings and click 'Create trail'. + + **Step 2: Configure Amazon Web Services S3 data connector for AWS CloudTrail** + + To ingest audit and management logs from `AWS CloudTrail` to Microsoft Sentinel, follow the instructions provided in the [Amazon Web Services S3 connector](https://learn.microsoft.com/en-us/azure/sentinel/connect-aws?tabs=s3) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/awss3.md b/Tools/Solutions Analyzer/connector-docs/connectors/awss3.md index 52369224cac..290693d087f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/awss3.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/awss3.md @@ -22,4 +22,47 @@ This connector allows you to ingest AWS service logs, collected in AWS S3 bucket For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission. + +**Custom Permissions:** +- **Environment**: you must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies, and the AWS services whose logs you want to collect. 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Set up your AWS environment** + +There are two options for setting up your AWS environment to send logs from an S3 bucket to your Log Analytics Workspace: +**Setup with PowerShell script (recommended)** + Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScripts.zip). + +> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2). + +> 2. Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html). + +Before running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json. + +**Government Cloud:** + Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScriptsGov.zip). + +> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2). + +> 2. 
Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html). + +Before running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json. + - **Run script to set up the environment**: `./ConfigAwsConnector.ps1` + - **External ID (Workspace ID)**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + + **Manual Setup** + + Follow the instruction in the following link to set up the environment: [Connect AWS S3 to Microsoft Sentinel](https://aka.ms/AWSS3Connector) + +**2. Add connection** +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `AwsS3`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/awss3serveraccesslogsdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/awss3serveraccesslogsdefinition.md index f0c0dd926b3..5a5cb97be1d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/awss3serveraccesslogsdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/awss3serveraccesslogsdefinition.md @@ -10,4 +10,60 @@ This connector allows you to ingest AWS S3 Server Access Logs into Microsoft Sentinel. These logs contain detailed records for requests made to S3 buckets, including the type of request, resource accessed, requester information, and response details. 
These logs are useful for analyzing access patterns, debugging issues, and ensuring security compliance. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Environment**: You must have the following AWS resources defined and configured: S3 Bucket, Simple Queue Service (SQS), IAM roles and permissions policies. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +### 1. AWS CloudFormation Deployment + To configure access on AWS, two templates have been generated to set up the AWS environment to send AWS S3 Server Access logs to your Log Analytics Workspace. + +#### Deploy CloudFormation Templates in AWS: +1. Navigate to the [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). +2. Click **Create stack** and select **With new resources**. +3. Choose **Upload a template file**, then click **Choose file** to upload the appropriate CloudFormation template provided. +4. Follow the prompts and click **Next** to complete the stack creation. +5. After the stacks are created, note down the **Role ARN** and **SQS Queue URL**.
+ +- **Template 1: OpenID Connect authentication provider deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Template 2: AWS Server Access resources deployment**: `AWSS3ServerAccess` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +### 2. Connect new collectors + To enable AWS S3 Server Access Logs Connector for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Role ARN** +- **Queue URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add new collector** + +*AWS Server Access Logs connector* + +When you click the "Add new collector" button in the portal, a configuration form will open. You'll need to provide: + +*Account details* + +- **Role ARN** (required) +- **Queue URL** (required) + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/awss3vpcflowlogsparquetdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/awss3vpcflowlogsparquetdefinition.md index 7d8ae051854..326119a3257 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/awss3vpcflowlogsparquetdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/awss3vpcflowlogsparquetdefinition.md @@ -10,4 +10,57 @@ This connector allows you to ingest AWS VPC Flow Logs, collected in AWS S3 buckets, to Microsoft Sentinel. AWS VPC Flow Logs provide visibility into network traffic within your AWS Virtual Private Cloud (VPC), enabling security analysis and network monitoring. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. AWS CloudFormation Deployment + To configure access on AWS, two templates have been generated to set up the AWS environment to send VPC Flow Logs from an S3 bucket to your Log Analytics Workspace. + #### For each template, create a Stack in AWS: + 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). + 2. Choose the ‘Specify template’ option, then ‘Upload a template file’ by clicking on ‘Choose file’ and selecting the appropriate CloudFormation template file provided below. Click ‘Choose file’ and select the downloaded template. + 3. Click 'Next' and 'Create stack'. 
+- **Template 1: OpenID connect authentication deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Template 2: AWS VPC Flow Logs resources deployment**: `AwsVPCFlow` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Connect new collectors + To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill in the required information and click on 'Connect' +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Role ARN** +- **Queue URL** +- **File Format** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add new collector** + +*AWS VPC Flow Logs connector* + +When you click the "Add new collector" button in the portal, a configuration form will open. You'll need to provide: + +*Account details* + +- **Role ARN** (required) +- **Queue URL** (required) +- **Data type** (required): Select from available options + - JSON Format + - Parquet Format + - CSV Format + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal.
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/awss3wafccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/awss3wafccpdefinition.md index 1b45667e5bf..a89d7dc4921 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/awss3wafccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/awss3wafccpdefinition.md @@ -10,4 +10,53 @@ This connector allows you to ingest AWS WAF logs, collected in AWS S3 buckets, to Microsoft Sentinel. AWS WAF logs are detailed records of traffic that web access control lists (ACLs) analyze, which are essential for maintaining the security and performance of web applications. These logs contain information such as the time AWS WAF received the request, the specifics of the request, and the action taken by the rule that the request matched. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. AWS CloudFormation Deployment + To configure access on AWS, two templates have been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace. + #### For each template, create a Stack in AWS: + 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). + 2.
Choose the ‘Specify template’ option, then ‘Upload a template file’ by clicking on ‘Choose file’ and selecting the appropriate CloudFormation template file provided below. Click ‘Choose file’ and select the downloaded template. + 3. Click 'Next' and 'Create stack'. +- **Template 1: OpenID connect authentication deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Template 2: AWS WAF resources deployment**: `AwsWAF` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Connect new collectors + To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Role ARN** +- **Queue URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add new collector** + +*AWS S3 connector* + +When you click the "Add new collector" button in the portal, a configuration form will open. You'll need to provide: + +*Account details* + +- **Role ARN** (required) +- **Queue URL** (required) + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal.
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/awssecurityhubfindingsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/awssecurityhubfindingsccpdefinition.md index 9c3e1754f85..2bf1e4d2eb8 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/awssecurityhubfindingsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/awssecurityhubfindingsccpdefinition.md @@ -10,4 +10,60 @@ This connector enables the ingestion of AWS Security Hub Findings, which are collected in AWS S3 buckets, into Microsoft Sentinel. It helps streamline the process of monitoring and managing security alerts by integrating AWS Security Hub Findings with Microsoft Sentinel's advanced threat detection and response capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Environment**: You must have the following AWS resources defined and configured: AWS Security Hub, Amazon Data Firehose, Amazon EventBridge, S3 Bucket, Simple Queue Service (SQS), IAM roles and permissions policies. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +### 1. AWS CloudFormation Deployment + Use the provided CloudFormation templates to configure the AWS environment for sending logs from AWS Security Hub to your Log Analytics Workspace. + +#### Deploy CloudFormation Templates in AWS: +1. 
Navigate to the [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). +2. Click **Create stack** and select **With new resources**. +3. Choose **Upload a template file**, then click **Choose file** to upload the appropriate CloudFormation template provided. +4. Follow the prompts and click **Next** to complete the stack creation. +5. After the stacks are created, note down the **Role ARN** and **SQS Queue URL**. + +- **Template 1: OpenID Connect authentication provider deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Template 2: AWS Security Hub resources deployment**: `AwsSecurityHub` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +### 2. Connect new collectors + To enable AWS Security Hub Connector for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Role ARN** +- **Queue URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add new collector** + +*AWS Security Hub connector* + +When you click the "Add new collector" button in the portal, a configuration form will open.
You'll need to provide: + +*Account details* + +- **Role ARN** (required) +- **Queue URL** (required) + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azureactivedirectory.md b/Tools/Solutions Analyzer/connector-docs/connectors/azureactivedirectory.md index 7cc1324d0d1..ed915f4c40c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azureactivedirectory.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azureactivedirectory.md @@ -10,4 +10,41 @@ Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Diagnostic Settings** (/providers/microsoft.aadiam): read and write permissions to AAD diagnostic settings. + +**Tenant Permissions:** +Requires GlobalAdmin, SecurityAdmin on the workspace's tenant + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Connect Microsoft Entra ID logs to Microsoft Sentinel** + +Select Microsoft Entra ID log types: +**Select Microsoft Entra ID Data Types** + +In the Microsoft Sentinel portal, select which data types to enable: + +- ☐ **Sign-In Logs** +- ☐ **Audit Logs** +- ☐ **Non-Interactive User Sign-In Log** +- ☐ **Service Principal Sign-In Logs** +- ☐ **Managed Identity Sign-In Logs** +- ☐ **Provisioning Logs** +- ☐ **ADFS Sign-In Logs** +- ☐ **User Risk Events** +- ☐ **Risky Users** +- ☐ **Network Access Traffic Logs** +- ☐ **Risky Service Principals** +- ☐ **Service Principal Risk Events** + +Each data type may have specific licensing requirements. Review the information provided for each type in the portal before enabling. + +> 💡 **Portal-Only Feature**: Data type selection is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azureactivedirectoryidentityprotection.md b/Tools/Solutions Analyzer/connector-docs/connectors/azureactivedirectoryidentityprotection.md index fbe3f9aaa66..f552e2bb1f5 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azureactivedirectoryidentityprotection.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azureactivedirectoryidentityprotection.md @@ -14,4 +14,26 @@ Microsoft Entra ID Protection provides a consolidated view at risk users, risk e [Get Microsoft Entra ID Premium P1/P2 ](https://aka.ms/asi-ipcconnectorgetlink) +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Licenses:** +- Azure AD Premium P1/P2 + +**Tenant Permissions:** +Requires GlobalAdmin, SecurityAdmin on the workspace's tenant + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +**1. Microsoft Entra ID Protection alerts to Microsoft Sentinel** + +Connect Microsoft Entra ID Protection to Microsoft Sentinel. + +> The alerts are sent to this Microsoft Sentinel workspace. +- Connect Microsoft Entra ID Protection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azureactivity.md b/Tools/Solutions Analyzer/connector-docs/connectors/azureactivity.md index fc598b8f2ef..3c7cd8f7c20 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azureactivity.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azureactivity.md @@ -10,4 +10,38 @@ Azure Activity Log is a subscription log that provides insight into subscription-level events that occur in Azure, including events from Azure Resource Manager operational data, service health events, write operations taken on the resources in your subscription, and the status of activities performed in Azure. For more information, see the [Microsoft Sentinel documentation ](https://go.microsoft.com/fwlink/p/?linkid=2219695&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **Policy​**: owner role assigned for each policy assignment scope.​ +- **Subscription**: owner role permission on the relevant subscription + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +ℹ️ This connector has been updated to use the diagnostics settings back-end pipeline. which provides increased functionality and better consistency with resource logs. +Connectors using this pipeline can also be governed at scale by Azure Policy. Learn more about the new Azure Activity connector. 
+Follow the instructions below to upgrade your connector to the diagnostics settings pipeline. + +**1. Disconnect your subscriptions from the legacy method** + +The subscriptions listed below are still using the older, legacy method. You are strongly encouraged to upgrade to the new pipeline.
+To do this, click on the 'Disconnect All' button below, before proceeding to launch the Azure Policy Assignment wizard. +- Configure data source: AzureActivityLog + +**2. Connect your subscriptions through diagnostic settings new pipeline** + +This connector uses Azure Policy to apply a single Azure Subscription log-streaming configuration to a collection of subscriptions, defined as a scope. +Follow the instructions below to create and apply a policy to all current and future subscriptions. **Note**, you may already have an active policy for this resource type. +**Launch the Azure Policy Assignment wizard and follow the steps.​** + + >1. In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope. + >2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log and metric types you want to ingest. + >3. To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox. + - **Configure policy assignment** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azureadvancedthreatprotection.md b/Tools/Solutions Analyzer/connector-docs/connectors/azureadvancedthreatprotection.md index b1142bdee50..b8059eca711 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azureadvancedthreatprotection.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azureadvancedthreatprotection.md @@ -32,4 +32,26 @@ Connect Microsoft Defender for Identity to gain visibility into the events and u For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2220069&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. 
+ +**Licenses:** +- Microsoft Defender for Identity + +**Tenant Permissions:** +Requires SecurityAdmin, GlobalAdmin on the workspace's tenant + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Microsoft Defender for Identity to Microsoft Sentinel** + +If your tenant is running [Microsoft Defender for Identity](https://aka.ms/Sentinel/MDI/Preview) in Microsoft Defender for Cloud Apps, connect here to stream your Microsoft Defender for Identity alerts into Microsoft Sentinel + +> In order to integrate with Microsoft Defender for Identity alerts, use **global administrator**, or **security administrator** permission. +- Connect Microsoft Defender for Identity + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azurebatchaccount-ccp.md b/Tools/Solutions Analyzer/connector-docs/connectors/azurebatchaccount-ccp.md index 7df2c04cd50..7674728cd0a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azurebatchaccount-ccp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azurebatchaccount-ccp.md @@ -10,4 +10,25 @@ Azure Batch Account is a uniquely identified entity within the Batch service. Most Batch solutions use Azure Storage for storing resource files and output files, so each Batch account is usually associated with a corresponding storage account. This connector lets you stream your Azure Batch account diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2224103&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Policy**: owner role assigned for each policy assignment scope + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect your Azure Batch Account diagnostics logs into Sentinel.** + +This connector uses Azure Policy to apply a single Azure Batch Account log-streaming configuration to a collection of instances, defined as a scope. Follow the instructions below to create and apply a policy to all current and future instances. Note, you may already have an active policy for this resource type. +**Stream diagnostics logs from your Azure Batch Account at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.** + + > 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab. + > 📋 **Additional Configuration Step**: This connector includes a configuration step of type `PolicyAssignment`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azurecloudngfwbypaloaltonetworks.md b/Tools/Solutions Analyzer/connector-docs/connectors/azurecloudngfwbypaloaltonetworks.md index 101879c348c..0c3d9b6f358 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azurecloudngfwbypaloaltonetworks.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azurecloudngfwbypaloaltonetworks.md @@ -10,4 +10,27 @@ Cloud Next-Generation Firewall by Palo Alto Networks - an Azure Native ISV Service - is Palo Alto Networks Next-Generation Firewall (NGFW) delivered as a cloud-native service on Azure. You can discover Cloud NGFW in the Azure Marketplace and consume it in your Azure Virtual Networks (VNet). With Cloud NGFW, you can access the core NGFW capabilities such as App-ID, URL filtering based technologies. It provides threat prevention and detection through cloud-delivered security services and threat prevention signatures. The connector allows you to easily connect your Cloud NGFW logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. For more information, see the [Cloud NGFW for Azure documentation](https://docs.paloaltonetworks.com/cloud-ngfw/azure). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Cloud NGFW by Palo Alto Networks to Microsoft Sentinel** + +Enable Log Settings on All Cloud NGFWs by Palo Alto Networks. +- Configure log settings: OpenCloudNGFW + +Inside your Cloud NGFW resource: + +1. Navigate to the **Log Settings** from the homepage. +2. Ensure the **Enable Log Settings** checkbox is checked. +3. From the **Log Settings** drop-down, choose the desired Log Analytics Workspace. +4. Confirm your selections and configurations. +5. Click **Save** to apply the settings. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azurecognitivesearch-ccp.md b/Tools/Solutions Analyzer/connector-docs/connectors/azurecognitivesearch-ccp.md index 085c653e823..f376d0d8e7d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azurecognitivesearch-ccp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azurecognitivesearch-ccp.md @@ -10,4 +10,25 @@ Azure Cognitive Search is a cloud search service that gives developers infrastructure, APIs, and tools for building a rich search experience over private, heterogeneous content in web, mobile, and enterprise applications. This connector lets you stream your Azure Cognitive Search diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+ +**Custom Permissions:** +- **Policy**: owner role assigned for each policy assignment scope + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect your Azure Cognitive Search diagnostics logs into Sentinel.** + +This connector uses Azure Policy to apply a single Azure Cognitive Search log-streaming configuration to a collection of instances, defined as a scope. Follow the instructions below to create and apply a policy to all current and future instances. Note, you may already have an active policy for this resource type. +**Stream diagnostics logs from your Azure Cognitive Search at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.** + + > 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab. + > 📋 **Additional Configuration Step**: This connector includes a configuration step of type `PolicyAssignment`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azuredatalakestoragegen1-ccp.md b/Tools/Solutions Analyzer/connector-docs/connectors/azuredatalakestoragegen1-ccp.md index 3dd6e65b214..2332fb7cd67 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azuredatalakestoragegen1-ccp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azuredatalakestoragegen1-ccp.md @@ -10,4 +10,25 @@ Azure Data Lake Storage Gen1 is an enterprise-wide hyper-scale repository for big data analytic workloads. Azure Data Lake enables you to capture data of any size, type, and ingestion speed in one single place for operational and exploratory analytics. This connector lets you stream your Azure Data Lake Storage Gen1 diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223812&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Policy**: owner role assigned for each policy assignment scope + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Connect your Azure Data Lake Storage Gen1 diagnostics logs into Sentinel.** + +This connector uses Azure Policy to apply a single Azure Data Lake Storage Gen1 log-streaming configuration to a collection of instances, defined as a scope. Follow the instructions below to create and apply a policy to all current and future instances. Note, you may already have an active policy for this resource type. +**Stream diagnostics logs from your Azure Data Lake Storage Gen1 at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.** + + > 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab. + > 📋 **Additional Configuration Step**: This connector includes a configuration step of type `PolicyAssignment`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azuredevopsauditlogs.md b/Tools/Solutions Analyzer/connector-docs/connectors/azuredevopsauditlogs.md index d397dfc7098..cc966a426e4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azuredevopsauditlogs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azuredevopsauditlogs.md @@ -10,4 +10,32 @@ The Azure DevOps Audit Logs data connector allows you to ingest audit events from Azure DevOps into Microsoft Sentinel. This data connector is built using the Microsoft Sentinel Codeless Connector Platform, ensuring seamless integration. It leverages the Azure DevOps Audit Logs API to fetch detailed audit events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview). These transformations enable parsing of the received audit data into a custom table during ingestion, improving query performance by eliminating the need for additional parsing. By using this connector, you can gain enhanced visibility into your Azure DevOps environment and streamline your security operations. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. + +**Custom Permissions:** +- **Azure DevOps Prerequisite**: Please ensure the following:
1. Register an Entra App in Microsoft Entra Admin Center under App Registrations.
2. In 'API permissions' - add Permissions to 'Azure DevOps - vso.auditlog'.
3. In 'Certificates & secrets' - generate 'Client secret'.
4. In 'Authentication' - add Redirect URI: 'https://portal.azure.com/TokenAuthorize/ExtensionName/Microsoft_Azure_Security_Insights'.
5. In the Azure DevOps settings - enable audit log and set **View audit log** for the user. [Azure DevOps Auditing](https://learn.microsoft.com/en-us/azure/devops/organizations/audit/azure-devops-auditing?view=azure-devops&tabs=preview-page).
6. Ensure the user assigned to connect the data connector has the View audit logs permission explicitly set to Allow at all times. This permission is essential for successful log ingestion. If the permission is revoked or not granted, data ingestion will fail or be interrupted. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect to Azure DevOps to start collecting Audit logs in Microsoft Sentinel.** + +1. Enter the App you have registered. + 2. In the 'Overview' section, copy the Application (client) ID. + 3. Select the 'Endpoints' button, and copy the 'OAuth 2.0 authorization endpoint (v2)' value and the 'OAuth 2.0 token endpoint (v2)' value. + 4. In the 'Certificates & secrets' section, copy the 'Client Secret value', and store it securely. +5. Provide the required information below and click 'Connect'. +- **Token Endpoint**: https://login.microsoftonline.com/{TenantId}/oauth2/v2.0/token +- **Authorization Endpoint**: https://login.microsoftonline.com/{TenantId}/oauth2/v2.0/authorize +- **API Endpoint**: https://auditservice.dev.azure.com/{organizationName}/_apis/audit/auditlog?api-version=7.2-preview +- **OAuth Configuration**: + - App Client ID + - App Client Secret + - Click 'Connect' to authenticate + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azureeventhub-ccp.md b/Tools/Solutions Analyzer/connector-docs/connectors/azureeventhub-ccp.md index 8b30edb058c..0abefcd630e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azureeventhub-ccp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azureeventhub-ccp.md @@ -10,4 +10,25 @@ Azure Event Hubs is a big data streaming platform and event ingestion service. 
It can receive and process millions of events per second. This connector lets you stream your Azure Event Hub diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Policy**: owner role assigned for each policy assignment scope + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect your Azure Event Hub diagnostics logs into Sentinel.** + +This connector uses Azure Policy to apply a single Azure Event Hub log-streaming configuration to a collection of instances, defined as a scope. Follow the instructions below to create and apply a policy to all current and future instances. Note, you may already have an active policy for this resource type. +**Stream diagnostics logs from your Azure Event Hub at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.** + + > 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab. + > 📋 **Additional Configuration Step**: This connector includes a configuration step of type `PolicyAssignment`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azurefirewall.md b/Tools/Solutions Analyzer/connector-docs/connectors/azurefirewall.md index b8a14bce050..0d80190d13f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azurefirewall.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azurefirewall.md @@ -10,4 +10,29 @@ Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Azure Firewall to Microsoft Sentinel** + +Enable Diagnostic Logs on All Firewalls. +- **Configure Azure Firewall** + +Inside your Firewall resource: + +1. Select **Diagnostic logs.​** +2. Select **+ Add diagnostic setting.​** +3. In the **Diagnostic setting** blade: + - Type a **Name**. + - Select **Send to Log Analytics**. + - Choose the log destination workspace. 
+ - Select the categories that you want to analyze ( Azure Firewall Network Rule, Azure Firewall Application Rule,Azure Firewall Nat Rule,Azure Firewall Threat Intelligence,Azure Firewall IDPS Signature,Azure Firewall DNS query,Azure Firewall FQDN Resolution Failure,Azure Firewall Fat Flow Log,Azure Firewall Flow Trace Log) + - Click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azurekeyvault.md b/Tools/Solutions Analyzer/connector-docs/connectors/azurekeyvault.md index 3344a1f35ee..364d6a0e97e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azurekeyvault.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azurekeyvault.md @@ -10,4 +10,28 @@ Azure Key Vault is a cloud service for securely storing and accessing secrets. A secret is anything that you want to tightly control access to, such as API keys, passwords, certificates, or cryptographic keys. This connector lets you stream your Azure Key Vault diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220125&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **Policy​**: owner role assigned for each policy assignment scope.​ + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect your Azure Key Vault diagnostics logs into Sentinel.** + +This connector uses Azure Policy to apply a single Azure Key Vault log-streaming configuration to a collection of instances, defined as a scope. 
+Follow the instructions below to create and apply a policy to all current and future instances. **Note**, you may already have an active policy for this resource type. +**Stream diagnostics logs from your Azure Key Vault at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.​** + + >1. In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope. + >2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log and metric types you want to ingest. + >3. To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox. + - **Configure policy assignment** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azurekubernetes.md b/Tools/Solutions Analyzer/connector-docs/connectors/azurekubernetes.md index bae379141a0..34365992412 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azurekubernetes.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azurekubernetes.md @@ -10,4 +10,28 @@ Azure Kubernetes Service (AKS) is an open-source, fully-managed container orchestration service that allows you to deploy, scale, and manage Docker containers and container-based applications in a cluster environment. This connector lets you stream your Azure Kubernetes Service (AKS) diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219762&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. 
+ +**Custom Permissions:** +- **Policy**: owner role assigned for each policy assignment scope. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect your Azure Kubernetes Service (AKS) diagnostics logs into Sentinel.** + +This connector uses Azure Policy to apply a single Azure Kubernetes Service log-streaming configuration to a collection of instances, defined as a scope. +Follow the instructions below to create and apply a policy to all current and future instances. **Note**, you may already have an active policy for this resource type. +**Stream diagnostics logs from your Azure Kubernetes Service (AKS) at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.** + + >1. In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope. + >2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log and metric types you want to ingest. + >3. To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox. + - **Configure policy assignment** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azurelogicapps-ccp.md b/Tools/Solutions Analyzer/connector-docs/connectors/azurelogicapps-ccp.md index 4c9951d9f14..050b06dece8 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azurelogicapps-ccp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azurelogicapps-ccp.md @@ -10,4 +10,25 @@ Azure Logic Apps is a cloud-based platform for creating and running automated workflows that integrate your apps, data, services, and systems. 
This connector lets you stream your Azure Logic Apps diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Policy**: owner role assigned for each policy assignment scope + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect your Logic Apps diagnostics logs into Sentinel.** + +This connector uses Azure Policy to apply a single Azure Logic Apps log-streaming configuration to a collection of instances, defined as a scope. Follow the instructions below to create and apply a policy to all current and future instances. Note, you may already have an active policy for this resource type. +**Stream diagnostics logs from your Azure Logic Apps at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.** + + > 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab. + > 📋 **Additional Configuration Step**: This connector includes a configuration step of type `PolicyAssignment`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azurensg.md b/Tools/Solutions Analyzer/connector-docs/connectors/azurensg.md index 5b4a7cb8ac6..6a814e2f7fe 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azurensg.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azurensg.md @@ -26,4 +26,27 @@ When you enable logging for an NSG, you can gather the following types of resource information: This connector lets you stream your NSG diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223718&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **Policy**: owner role assigned for each policy assignment scope. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect your Network Security Groups diagnostics logs into Sentinel.** + +This connector uses Azure Policy to apply a single Azure Network Security Groups log-streaming configuration to a collection of instances, defined as a scope. +Follow the instructions below to create and apply a policy to all current and future instances. 
**Note**, you may already have an active policy for this resource type. +**Launch the Azure Policy Assignment wizard and follow the steps.​** + + >1. In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope. + >2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log and metric types you want to ingest. + >3. To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox. + - **Configure policy assignment** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azuresecuritycenter.md b/Tools/Solutions Analyzer/connector-docs/connectors/azuresecuritycenter.md index bf88013d08f..8e867f38090 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azuresecuritycenter.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azuresecuritycenter.md @@ -14,4 +14,24 @@ Microsoft Defender for Cloud is a security management tool that allows you to de [For more information>](https://aka.ms/ASC-Connector) +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **License**: The connector is available for all deployments of Microsoft Defender for Cloud. +- **Subscription**: [read security data](https://docs.microsoft.com/azure/security-center/security-center-permissions). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Connect Microsoft Defender for Cloud to Microsoft Sentinel** + +Mark the check box of each Azure subscription whose alerts you want to import into Microsoft Sentinel, then select **Connect** above the list. + +> The connector can be enabled only on subscriptions that have at least one Microsoft Defender plan enabled in Microsoft Defender for Cloud, and only by users with Security Reader permissions on the subscription. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `AzureSecurityCenterSubscriptions`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azureservicebus-ccp.md b/Tools/Solutions Analyzer/connector-docs/connectors/azureservicebus-ccp.md index 78139aaa638..f9c62e9bb9a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azureservicebus-ccp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azureservicebus-ccp.md @@ -10,4 +10,25 @@ Azure Service Bus is a fully managed enterprise message broker with message queues and publish-subscribe topics (in a namespace). This connector lets you stream your Azure Service Bus diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Policy**: owner role assigned for each policy assignment scope + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Connect your Azure Service Bus diagnostics logs into Sentinel.** + +This connector uses Azure Policy to apply a single Azure Service Bus log-streaming configuration to a collection of instances, defined as a scope. Follow the instructions below to create and apply a policy to all current and future instances. Note, you may already have an active policy for this resource type. +**Stream diagnostics logs from your Azure Service Bus at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.** + + > 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab. + > 📋 **Additional Configuration Step**: This connector includes a configuration step of type `PolicyAssignment`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azuresql.md b/Tools/Solutions Analyzer/connector-docs/connectors/azuresql.md index bbf7158dbf5..eb6e8799b2f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azuresql.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azuresql.md @@ -10,4 +10,36 @@ Azure SQL is a fully managed, Platform-as-a-Service (PaaS) database engine that handles most database management functions, such as upgrading, patching, backups, and monitoring, without necessitating user involvement. This connector lets you stream your Azure SQL databases audit and diagnostic logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **Policy​**: owner role assigned for each policy assignment scope.​ +- **Auditing**: read and write permissions to Azure SQL Server audit settings. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect your Azure SQL databases diagnostics logs into Sentinel.** + +This connector uses Azure Policy to apply a single Azure SQL Database log-streaming configuration to a collection of instances, defined as a scope. +Follow the instructions below to create and apply a policy to all current and future instances. 
**Note**, you may already have an active policy for this resource type. +**Stream diagnostics logs from your Azure SQL Databases at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.​** + + >1. In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope. + >2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log and metric types you want to ingest. + >3. To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox. + - **Configure policy assignment** +**Stream audit logs from your Azure SQL Databases at the server level at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.​** + + >1. In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope. + >2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log and metric types you want to ingest. + >3. To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox. + - **Configure policy assignment** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azurestorageaccount.md b/Tools/Solutions Analyzer/connector-docs/connectors/azurestorageaccount.md index 0e521bf50ed..36789d32c0b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azurestorageaccount.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azurestorageaccount.md @@ -10,4 +10,45 @@ Azure Storage account is a cloud solution for modern data storage scenarios. It contains all your data objects: blobs, files, queues, tables, and disks. 
This connector lets you stream Azure Storage accounts diagnostics logs into your Microsoft Sentinel workspace, allowing you to continuously monitor activity in all your instances, and detect malicious activity in your organization. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220068&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Policy**: owner role assigned for each policy assignment scope + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect your Azure Storage Account diagnostics logs into Sentinel.** + +This connector uses a set of Azure Policies to apply a log-streaming configuration to a collection of instances, defined as a scope. Follow the instructions below to create and apply policies to all current and future instances. To get the most out of the Storage Account Diagnostic logging from the Azure Storage Account, we recommend that you enable Diagnostic logging from all services within the Azure Storage Account - Blob, Queue, Table and File. Note, you may already have an active policy for this resource type. +**Stream diagnostics logs from your Azure Storage Account at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.** + + > 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab. + > 📋 **Additional Configuration Step**: This connector includes a configuration step of type `PolicyAssignment`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. +**Stream diagnostics logs from your Azure Storage Blob service at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.** + + > 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab. + > 📋 **Additional Configuration Step**: This connector includes a configuration step of type `PolicyAssignment`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. +**Stream diagnostics logs from your Azure Storage Queue service at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.** + + > 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab. + > 📋 **Additional Configuration Step**: This connector includes a configuration step of type `PolicyAssignment`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. +**Stream diagnostics logs from your Azure Storage Table service at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.** + + > 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab. + > 📋 **Additional Configuration Step**: This connector includes a configuration step of type `PolicyAssignment`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. +**Stream diagnostics logs from your Azure Storage File service at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.** + + > 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab. + > 📋 **Additional Configuration Step**: This connector includes a configuration step of type `PolicyAssignment`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/azurestreamanalytics-ccp.md b/Tools/Solutions Analyzer/connector-docs/connectors/azurestreamanalytics-ccp.md index e3cb57f6138..c3e2996deba 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/azurestreamanalytics-ccp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/azurestreamanalytics-ccp.md @@ -10,4 +10,25 @@ Azure Stream Analytics is a real-time analytics and complex event-processing engine that is designed to analyze and process high volumes of fast streaming data from multiple sources simultaneously. This connector lets you stream your Azure Stream Analytics hub diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Policy**: owner role assigned for each policy assignment scope + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect your Azure Stream Analytics diagnostics logs into Sentinel.** + +This connector uses Azure Policy to apply a single Azure Stream Analytics log-streaming configuration to a collection of instances, defined as a scope. Follow the instructions below to create and apply a policy to all current and future instances. 
Note, you may already have an active policy for this resource type. +**Stream diagnostics logs from your Azure Stream Analytics at scale** +**Launch the Azure Policy Assignment wizard and follow the steps.** + + > 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab. + > 📋 **Additional Configuration Step**: This connector includes a configuration step of type `PolicyAssignment`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/barracuda.md b/Tools/Solutions Analyzer/connector-docs/connectors/barracuda.md index 24add16f782..31d0956f51d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/barracuda.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/barracuda.md @@ -14,4 +14,29 @@ The Barracuda Web Application Firewall (WAF) connector allows you to easily conn [For more information >​](https://aka.ms/CEF-Barracuda) +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure and connect Barracuda WAF** + +The Barracuda Web Application Firewall can integrate with and export logs directly to Microsoft Sentinel via Azure OMS Server.​ + +1. Go to [Barracuda WAF configuration](https://aka.ms/asi-barracuda-connector), and follow the instructions, using the parameters below to set up the connection:. + +2. 
Web Firewall logs facility: Go to the advanced settings (link below) for your workspace and on the **Data > Syslog** tabs, make sure that the facility exists.​ + +> Notice that the data from all regions will be stored in the selected workspace +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Open Syslog settings** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/barracudacloudfirewall.md b/Tools/Solutions Analyzer/connector-docs/connectors/barracudacloudfirewall.md index b75cab6ea41..6dc1856dd64 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/barracudacloudfirewall.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/barracudacloudfirewall.md @@ -10,4 +10,48 @@ The Barracuda CloudGen Firewall (CGFW) connector allows you to easily connect your Barracuda CGFW logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **Barracuda CloudGen Firewall**: must be configured to export logs via Syslog + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CGFWFirewallActivity and load the function code or click [here](https://aka.ms/sentinel-barracudacloudfirewall-parser). The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. + +**2. Configure and connect the Barracuda CloudGen Firewall** + +[Follow instructions](https://aka.ms/sentinel-barracudacloudfirewall-connector) to configure syslog streaming. Use the IP address or hostname for the Linux machine with the Microsoft Sentinel agent installed for the Destination IP address. 
+- **Open Syslog settings** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/bettermtd.md b/Tools/Solutions Analyzer/connector-docs/connectors/bettermtd.md index acf4f89b067..819cc5b7fe9 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/bettermtd.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/bettermtd.md @@ -10,4 +10,31 @@ The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +1. In **Better MTD Console**, click on **Integration** on the side bar. +2. Select **Others** tab. +3. Click the **ADD ACCOUNT** button and Select **Microsoft Sentinel** from the available integrations. +4. Create the Integration: + - set `ACCOUNT NAME` to a descriptive name that identifies the integration then click **Next** + - Enter your `WORKSPACE ID` and `PRIMARY KEY` from the fields below, click **Save** + - Click **Done** +5. 
Threat Policy setup (Which Incidents should be reported to `Microsoft Sentinel`): + - In **Better MTD Console**, click on **Policies** on the side bar + - Click on the **Edit** button of the Policy that you are using. + - For each Incident types that you want to be logged go to **Send to Integrations** field and select **Sentinel** +6. For additional information, please refer to our [Documentation](https://mtd-docs.bmobi.net/integrations/how-to-setup-azure-sentinel-integration#mtd-integration-configuration). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/beyondsecuritybesecure.md b/Tools/Solutions Analyzer/connector-docs/connectors/beyondsecuritybesecure.md index e602fa7f17e..39613cf20d8 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/beyondsecuritybesecure.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/beyondsecuritybesecure.md @@ -10,4 +10,35 @@ The [Beyond Security beSECURE](https://beyondsecurity.com/) connector allows you to easily connect your Beyond Security beSECURE scan events, scan results and audit trail with Azure Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure beSECURE** + +Follow the steps below to configure your beSECURE solution to send out scan results, scan status and audit trail to Azure Sentinel. +**1. Access the Integration menu** + + 1.1 Click on the 'More' menu option + +1.2 Select Server + +1.3 Select Integration + +1.4 Enable Azure Sentinel + + **2. Provide Azure Sentinel settings** + + Fill in the Workspace ID and Primary Key values, click 'Modify' + - **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + - **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/bigiddspmlogsconnectordefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/bigiddspmlogsconnectordefinition.md index 495e8134530..f19c4675593 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/bigiddspmlogsconnectordefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/bigiddspmlogsconnectordefinition.md @@ -10,4 +10,23 @@ The [BigID DSPM](https://bigid.com/data-security-posture-management/) data connector provides the capability to ingest BigID DSPM cases with affected objects and datasource information into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+ +**Custom Permissions:** +- **BigID DSPM API access**: Access to the BigID DSPM API through a BigID Token is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect to BigID DSPM API to start collecting BigID DSPM cases and affected Objects in Microsoft Sentinel** + +Provide your BigID domain name like 'customer.bigid.cloud' and your BigID token. Generate a token in the BigID console via Settings -> Access Management -> Users -> Select User and generate a token. +- **BigID FQDN**: BigID FQDN +- **BigID Token**: (password field) +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/bitglass.md b/Tools/Solutions Analyzer/connector-docs/connectors/bitglass.md index 0c3ee0f014b..9d5514de6e7 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/bitglass.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/bitglass.md @@ -10,4 +10,96 @@ The [Bitglass](https://www.bitglass.com/) data connector provides the capability to retrieve security event logs of the Bitglass services and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **BitglassToken** and **BitglassServiceURL** are required for making API calls. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Bitglass**](https://aka.ms/sentinel-bitglass-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuration steps for the Bitglass Log Retrieval API** + + Follow the instructions to obtain the credentials. + +1. Please contact Bitglass [support](https://pages.bitglass.com/Contact.html) and obtain the **BitglassToken** and **BitglassServiceURL**. +2. Save the credentials for use in the data connector. 
+ +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Bitglass data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Bitglass data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-bitglass-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **BitglassToken**, **BitglassServiceURL** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Bitglass data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-bitglass-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. 
Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitglassXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select ** New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + BitglassToken + BitglassServiceURL + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. 
Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/bitsight.md b/Tools/Solutions Analyzer/connector-docs/connectors/bitsight.md index 377945bfda5..ea9b6581ac4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/bitsight.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/bitsight.md @@ -10,4 +10,235 @@ The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: BitSight API Token is required. See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Steps to Create/Get Bitsight API Token** + + Follow these instructions to get a BitSight API Token. + 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, + Go to Settings > Account > User Preferences > API Token. + 2. For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, + Go to Settings > Account > User Preferences > API Token. + 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, + Go to Settings > Account > API Token. + +**STEP 2 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. 
+ +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 3 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 4 - Get Object ID of your application in Microsoft Entra ID** + + After creating your app registration, follow the steps in this section to get Object ID: + 1. Go to **Microsoft Entra ID**. + 2. Select **Enterprise applications** from the left menu. + 3. Find your newly created application in the list (you can search by the name you provided). + 4. Click on the application. + 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment. + +**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID** + + Follow the steps in this section to assign the role: + 1. In the Azure portal, Go to **Resource Group** and select your resource group. 
+ 2. Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. Select **Contributor** as role and click on next. + 5. In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the BitSight API Token. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**7. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the BitSight connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information: + + a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. + + b. **API_token** - Enter API Token of your BitSight account. + + c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. + + d. 
**Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. + + e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. + + f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. + + g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. + + h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. + + i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics "Settings". + + j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. + + k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. + + l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. + + m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. + + n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. + + o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. + + p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. 
Please do not keep this field as empty else you will get validation error. + + q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. + + r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. + + s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. + + t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. + + u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. + + v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. + + w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). + + x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). + + y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. +4. Once all application settings have been entered, click **Review + create** to deploy.. + +**8. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code). 
+ +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX). + + e. **Select a runtime:** Choose Python 3.8 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. 
Add each of the following application settings individually, with their respective values (case-sensitive): + + a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. + + b. **API_token** - Enter API Token of your BitSight account. + + c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. + + d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. + + e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. + + f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. + + g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. + + h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. + + i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics "Settings". + + j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. + + k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. + + l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. + + m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. + + n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. + + o. 
**Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. + + p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. + + q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. + + r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. + + s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. + + t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. + + u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. + + v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. + + w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). + + x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). + + y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. 
This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/bitwardeneventlogs.md b/Tools/Solutions Analyzer/connector-docs/connectors/bitwardeneventlogs.md index 086b47cd53a..5d299c4a591 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/bitwardeneventlogs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/bitwardeneventlogs.md @@ -10,4 +10,29 @@ This connector provides insight into activity of your Bitwarden organization such as user's activity (logged in, changed password, 2fa, etc.), cipher activity (created, updated, deleted, shared, etc.), collection activity, organization activity, and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Bitwarden Client Id and Client Secret**: Your API key can be found in the Bitwarden organization admin console. Please see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Bitwarden Event Logs to Microsoft Sentinel** + +Your API key can be found in the Bitwarden organization admin console. 
+Please see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information. +Self-hosted Bitwarden servers may need to reconfigure their installation's URL. +- **Bitwarden Identity Url**: https://identity.bitwarden.com +- **Bitwarden Api Url**: https://api.bitwarden.com +- **OAuth Configuration**: + - Client ID + - Client Secret + - Click 'Connect' to authenticate + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/blackberrycylanceprotect.md b/Tools/Solutions Analyzer/connector-docs/connectors/blackberrycylanceprotect.md index 7f2cf35e8b6..4422d1d42a6 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/blackberrycylanceprotect.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/blackberrycylanceprotect.md @@ -10,4 +10,49 @@ The [Blackberry CylancePROTECT](https://www.blackberry.com/us/en/products/blackberry-protect) connector allows you to easily connect your CylancePROTECT logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission. + +**Custom Permissions:** +- **CylancePROTECT**: must be configured to export logs via Syslog. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CylancePROTECT and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Blackberry%20CylancePROTECT/Parsers/CylancePROTECT.txt), on the second line of the query, enter the hostname(s) of your CylancePROTECT device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Select the link below to open your workspace **agents configuration**, and select the **Syslog** tab. +2. Select **Add facility** and choose from the drop-down list of facilities. Repeat for all the facilities you want to add. +3. Mark the check boxes for the desired severities for each facility. +4. Click **Apply**. +- **Open Syslog settings** + +**3. Configure and connect the CylancePROTECT** + +[Follow these instructions](https://docs.blackberry.com/content/dam/docs-blackberry-com/release-pdfs/en/cylance-products/syslog-guides/Cylance%20Syslog%20Guide%20v2.0%20rev12.pdf) to configure the CylancePROTECT to forward syslog. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/bloodhoundenterprise.md b/Tools/Solutions Analyzer/connector-docs/connectors/bloodhoundenterprise.md index 4e98cb62ff9..735ed78c05e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/bloodhoundenterprise.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/bloodhoundenterprise.md @@ -10,4 +10,159 @@ The solution is designed to test Bloodhound Enterprise package creation process. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **BloodHound Enterprise API key & Id** is required. See the documentation to learn more about API on the `https://bloodhound.specterops.io/integrations/bloodhound-api/working-with-api`. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a 'BloodHound Enterprise' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +**STEP 1 - Retrieve BloodHound Enterprise API Key and ID** + +To enable the Azure Function to authenticate successfully and pull logs into Microsoft Sentinel, you must first obtain the API Key and ID from your BloodHound Enterprise instance. See the documentation to learn more about API on the `https://bloodhound.specterops.io/integrations/bloodhound-api/working-with-api`. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the 'BloodHound Enterprise' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'BloodHound Enterprise' API authorization key(s) or Token, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the 'BloodHound Enterprise' connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)]() +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Tenant URL**, **API Key**, **API ID** 'and/or Other required fields'. +>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. 
Click **Purchase** to deploy. + +**Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the 'BloodHound Enterprise' connector manually with Azure Functions. + +**1. Create a Function App** + +1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp). +2. Click **+ Create** at the top. +3. In the **Basics** tab, ensure Runtime stack is set to **python 3.11**. +4. In the **Hosting** tab, ensure **Plan type** is set to **'Consumption (Serverless)'**. +5. Select a Storage account. +6. 'Add other required configurations'. +7. 'Make other preferable configuration changes', if needed, then click **Create**. + +**2. Import Function App Code (Zip deployment)** + +1. Install Azure CLI +2. From terminal type `az functionapp deployment source config-zip -g <ResourceGroup> -n <FunctionApp> --src <Zip File>` and hit enter. Set the `ResourceGroup` value to: your resource group name. Set the `FunctionApp` value to: your newly created function app name. Set the `Zip File` value to: `BloodHoundAzureFunction.zip` (path to your zip file). Note:- Download the zip file from the link - [Function App Code](https://github.com/metron-labs/Azure-Sentinel/blob/bloodhound/Solutions/BloodHound/Data%20Connectors/BloodHoundAzureFunction.zip) + +**3. Configure the Function App** + +1. In the Function App screen, click the Function App name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. 
Add each of the following application settings individually, under Name, with their respective string values (case-sensitive) under Value: + DigitalShadowsAccountID + WorkspaceID + WorkspaceKey + DigitalShadowsKey + DigitalShadowsSecret + HistoricalDays + DigitalShadowsURL + ClassificationFilterOperation + HighVariabilityClassifications + FUNCTION_NAME + logAnalyticsUri (optional) +(add any other settings required by the Function App) +Set the `DigitalShadowsURL` value to: `https://api.searchlight.app/v1` +Set the `HighVariabilityClassifications` value to: `exposed-credential,marked-document` +Set the `ClassificationFilterOperation` value to: `exclude` for exclude function app or `include` for include function app +>Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://<CustomerId>.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + +**STEP 3 - Register the Application in Microsoft Entra ID** + + 1. **Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**: + - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab. + - Ensure you are logged in with an account that has **Admin level** permissions. + +2. **Create a New Application**: + - In the **Microsoft Entra ID portal**, select **App registrations** mentioned on the left-hand side tab. + - Click on **+ New registration**. + - Fill out the following fields: + - **Name**: Enter a name for the app (e.g., “BloodHound App”). 
+ - **Supported account types**: Choose **Accounts in this organizational directory only** (Default Directory only - Single tenant). + - **Redirect URI**: Leave this blank unless required otherwise. + - Click **Register** to create the application. + +3. **Copy Application and Tenant IDs**: + - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You’ll need these for the integration. + +4. **Create a Client Secret**: + - In the **Certificates & secrets** section, click **+ New client secret**. + - Add a description (e.g., 'BloodHound Secret') and set an expiration (e.g., 1 year). + - Click **Add**. + - **Copy the client secret value immediately**, as it will not be shown again. + + **STEP 4 - Assign the "Monitoring Metrics Publisher" Role to the App** + + 1. **Open the Resource Group in Azure Portal**: + - Navigate to the **Resource Group** that contains the **Log Analytics Workspace** and **Data Collection Rules (DCRs)** where you want the app to push data. + +2. **Assign the Role**: + - In the **Resource Group** menu, click on **Access control (IAM)** mentioned on the left-hand side tab. + - Click on **+ Add** and select **Add role assignment**. + - In the **Role** dropdown, search for and select the **Monitoring Metrics Publisher** role. + - Under **Assign access to**, choose **Azure AD user, group, or service principal**. + - In the **Select** field, search for your registered app by **name** or **client ID**. + - Click **Save** to assign the role to the application. + + **STEP 5 - Deploy the ARM Template** + + 1. **Retrieve the Workspace ID**: + - After assigning the role, you will need the **Workspace ID**. + - Navigate to the **Log Analytics Workspace** within the **Resource Group**. + - In the **Overview** section, locate the **Workspace ID** field under **Workspace details**. + - **Copy the Workspace ID** and keep it handy for the next steps. + +2. 
**Click the Deploy to Azure Button**: + - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fmetron-labs%2FAzure-Sentinel%2Fbloodhound%2FSolutions%2FBloodHound%2FData%2520Connectors%2FDeployToAzure.json). + - This will take you directly to the Azure portal to start the deployment. + +3. **Review and Customize Parameters**: + - On the custom deployment page, ensure you’re deploying to the correct **subscription** and **resource group**. + - Fill in the parameters like **workspace name**, **workspace ID**, and **workspace location**. + +4. **Click Review + Create** and then **Create** to deploy the resources. + + **STEP 6 - Verify DCE, DCR, and Log Analytics Table Setup** + + 1. **Check the Data Collection Endpoint (DCE)**: + - After deploying, go to **Azure Portal > Data Collection Endpoints**. + - Verify that the **BloodHoundDCE** endpoint has been created successfully. + - **Copy the DCE Logs Ingestion URI**, as you’ll need this for generating the webhook URL. + +2. **Confirm Data Collection Rule (DCR) Setup**: + - Go to **Azure Portal > Data Collection Rules**. + - Ensure the **BloodHoundDCR** rule is present. + - **Copy the Immutable ID** of the DCR from the Overview page, as you’ll need it for the webhook URL. + +3. **Validate Log Analytics Table**: + - Navigate to your **Log Analytics Workspace** (linked to Microsoft Sentinel). + - Under the **Tables** section, verify that the **BloodHoundTable_CL** table has been created successfully and is ready to receive data. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/boschaishield.md b/Tools/Solutions Analyzer/connector-docs/connectors/boschaishield.md index ed12204b49f..126f7ea457e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/boschaishield.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/boschaishield.md @@ -10,4 +10,25 @@ [AIShield](https://www.boschaishield.com/) connector allows users to connect with AIShield custom defense mechanism logs with Microsoft Sentinel, allowing the creation of dynamic Dashboards, Workbooks, Notebooks and tailored Alerts to improve investigation and thwart attacks on AI systems. It gives users more insight into their organization's AI assets security posturing and improves their AI systems security operation capabilities.AIShield.GuArdIan analyzes the LLM generated content to identify and mitigate harmful content, safeguarding against legal, policy, role based, and usage based violations +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Note**: Users should have utilized AIShield SaaS offering to conduct vulnerability analysis and deployed custom defense mechanisms generated along with their AI asset. [**Click here**](https://azuremarketplace.microsoft.com/marketplace/apps/rbei.bgsw_aishield_product) to know more or get in touch. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**AIShield**](https://aka.ms/sentinel-boschaishield-parser) which is deployed with the Microsoft Sentinel Solution. + +>**IMPORTANT:** Before deploying the AIShield Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/boxdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/boxdataconnector.md index 6fc6fd9ddba..acbbb9798e2 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/boxdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/boxdataconnector.md @@ -10,4 +10,70 @@ The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Microsoft Sentinel using the Box REST API. Refer to [Box documentation](https://developer.box.com/guides/events/enterprise-events/for-enterprise/) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Box API Credentials**: Box config JSON file is required for Box REST API JWT authentication. [See the documentation to learn more about JWT authentication](https://developer.box.com/guides/authentication/jwt/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Box REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**BoxEvents**](https://aka.ms/sentinel-BoxDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuration of the Box events collection** + +See documentation to [setup JWT authentication](https://developer.box.com/guides/authentication/jwt/jwt-setup/) and [obtain JSON file with credentials](https://developer.box.com/guides/authentication/jwt/with-sdk/#prerequisites). 
+ +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Box data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Box JSON configuration file, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Box data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BoxDataConnector-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **AzureSentinelWorkspaceId**, **AzureSentinelSharedKey**, **BoxConfigJSON** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Box data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the [Azure Function App](https://aka.ms/sentinel-BoxDataConnector-functionapp) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. 
+ + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + AzureSentinelWorkspaceId + AzureSentinelSharedKey + BOX_CONFIG_JSON + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/boxeventsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/boxeventsccpdefinition.md index 211c2da87c4..19c227138d9 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/boxeventsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/boxeventsccpdefinition.md @@ -10,4 +10,42 @@ The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Microsoft Sentinel using the Box REST API. Refer to [Box documentation](https://developer.box.com/guides/events/enterprise-events/for-enterprise/) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Box API credentials**: Box API requires a Box App client ID and client secret to authenticate. 
[See the documentation to learn more about Client Credentials grant](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/) +- **Box Enterprise ID**: Box Enterprise ID is required to make the connection. See documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Codeless Connector Platform (CCP) to connect to the Box REST API to pull logs into Microsoft Sentinel. + +>**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**BoxEvents**](https://aka.ms/sentinel-BoxDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Create Box Custom Application** + +See documentation to [setup client credentials authentication](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/) + +**STEP 2 - Grab Client ID and Client Secret values** + +You might need to setup 2FA to fetch the secret. + +**STEP 3 - Grab Box Enterprise ID from Box Admin Console** + +See documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/) + +**4. 
Connect to Box to start collecting event logs to Microsoft Sentinel** + +Provide the required values below: +- **Box Enterprise ID**: 123456 +- **OAuth Configuration**: + - Client ID + - Client Secret + - Click 'Connect' to authenticate + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/broadcomsymantecdlp.md b/Tools/Solutions Analyzer/connector-docs/connectors/broadcomsymantecdlp.md index 4d3190d34e3..684de46c850 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/broadcomsymantecdlp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/broadcomsymantecdlp.md @@ -10,4 +10,61 @@ The [Broadcom Symantec Data Loss Prevention (DLP)](https://www.broadcom.com/products/cyber-security/information-protection/data-loss-prevention) connector allows you to easily connect your Symantec DLP with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s information, where it travels, and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SymantecDLP and load the function code or click [here](https://aka.ms/sentinel-symantecdlp-parser). The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python –version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Symantec DLP logs to a Syslog agent** + +Configure Symantec DLP to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. +1. [Follow these instructions](https://knowledge.broadcom.com/external/article/159509/generating-syslog-messages-from-data-los.html) to configure the Symantec DLP to forward syslog +2. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python –version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/broadcomsymantecdlpama.md b/Tools/Solutions Analyzer/connector-docs/connectors/broadcomsymantecdlpama.md index e082e6287c9..b479fac3309 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/broadcomsymantecdlpama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/broadcomsymantecdlpama.md @@ -10,4 +10,61 @@ The [Broadcom Symantec Data Loss Prevention (DLP)](https://www.broadcom.com/products/cyber-security/information-protection/data-loss-prevention) connector allows you to easily connect your Symantec DLP with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s information, where it travels, and improves your security operation capabilities. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SymantecDLP and load the function code or click [here](https://aka.ms/sentinel-symantecdlp-parser). The function usually takes 10-15 minutes to activate after solution installation/update. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. 
Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Symantec DLP logs to a Syslog agent** + + Configure Symantec DLP to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. +1. [Follow these instructions](https://knowledge.broadcom.com/external/article/159509/generating-syslog-messages-from-data-los.html) to configure the Symantec DLP to forward syslog +2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/carbonblackawss3.md b/Tools/Solutions Analyzer/connector-docs/connectors/carbonblackawss3.md index aeaca3628f3..524cfc93051 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/carbonblackawss3.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/carbonblackawss3.md @@ -10,4 +10,86 @@ The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission. + +**Custom Permissions:** +- **Environment**: You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies +- **Environment**: You must have the a Carbon black account and required permissions to create a Data Forwarded to AWS S3 buckets. +For more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. 
AWS CloudFormation Deployment + To configure access on AWS, two templates have been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace. + #### For each template, create Stack in AWS: + 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) + 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template + 3. Click 'Next' and 'Create stack' +- **Template 1: OpenID connect authentication deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Template 2: AWS Carbon Black resources deployment**: `CarbonBlack` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need to supply a few parameters + * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS) + * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. + * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here + * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. + * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket). + * **SQSQueuePrefix**: The stack creates multiple queues, this prefix will be added to each one of them. + * **WorkspaceID**: Use the Workspace ID provided below. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector. +#### 2. Carbon Black data forwarder configuration + After all AWS resources have been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. + You will be required to add 'S3 prefix' for each forwarder, please use this mapping: + | Event type | S3 prefix | + |-----------------|-----------| + | Alert | carbon-black-cloud-forwarder/Alerts | + | Auth Events | carbon-black-cloud-forwarder/Auth | + | Endpoint Events | carbon-black-cloud-forwarder/Endpoint | + | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist | +#### 2.1. Test your data forwarder (Optional) + To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately. +#### 3. 
Connect new collectors + To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown + +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Role ARN** +- **Queue URL** +- **Stream name** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add new collector** + +*AWS S3 connector* + +When you click the "Add new collector" button in the portal, a configuration form will open. You'll need to provide: + +*Account details* + +- **Role ARN** (required) +- **Queue URL** (required) +- **Data type** (required): Select from available options + - Alerts + - Auth Events + - Endpoint Events + - Watchlist + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cbspollingidazurefunctions.md b/Tools/Solutions Analyzer/connector-docs/connectors/cbspollingidazurefunctions.md index 62ee307c5b5..e5fb607566b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cbspollingidazurefunctions.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cbspollingidazurefunctions.md @@ -10,4 +10,90 @@ Through the API integration, you have the capability to retrieve all the issues related to your CBS organizations via a RESTful interface. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a 'CyberBlindSpot' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the 'CyberBlindSpot' API** + +The provider should provide or link to detailed steps to configure the 'CyberBlindSpot' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the 'CyberBlindSpot' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'CyberBlindSpot' API authorization key(s) readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the 'CyberBlindSpot' connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CTM360-CBS-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CTM360-CBS-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **API **, 'and/or Other required fields'. +>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the CTM360 CBS data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development. + +1. Download the [Azure Function App](https://raw.githubusercontent.com/CTM360-Integrations/Azure-Sentinel/ctm360-HV-CBS-azurefunctionapp/Solutions/CTM360/Data%20Connectors/CBS/AzureFunctionCTM360_CBS.zip) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CTIXYZ). + + e. **Select a runtime:** Choose Python 3.11. + + f. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + CTM360AccountID + WorkspaceID + WorkspaceKey + CTM360Key + FUNCTION_NAME + logAnalyticsUri - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +3. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cef.md b/Tools/Solutions Analyzer/connector-docs/connectors/cef.md index ec1f9bbe620..3f06ce6e42c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cef.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cef.md @@ -10,4 +10,57 @@ Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by many security vendors to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223902&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Keys** (Workspace): read permissions to shared keys for the workspace. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Set your security solution to send Syslog messages in CEF format to the proxy machine. 
Make sure you to send the logs to port 514 TCP on the machine’s IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cefama.md b/Tools/Solutions Analyzer/connector-docs/connectors/cefama.md index 92e8aad7402..a2392739ad9 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cefama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cefama.md @@ -10,4 +10,24 @@ Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by many security vendors to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223547&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace data sources** (Workspace): read and write permissions. + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Enable data collection rule​** + +> CEF Events logs are collected only from **Linux** agents. +- Configure CefAma data connector + +- **Create data collection rule** + +**2. Run the following command to install and apply the CEF collector:** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/checkpointcyberintalerts.md b/Tools/Solutions Analyzer/connector-docs/connectors/checkpointcyberintalerts.md index 37e472917c4..0b92c4adb74 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/checkpointcyberintalerts.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/checkpointcyberintalerts.md @@ -10,4 +10,25 @@ Cyberint, a Check Point company, provides a Microsoft Sentinel integration to streamline critical Alerts and bring enriched threat intelligence from the Infinity External Risk Management solution into Microsoft Sentinel. This simplifies the process of tracking the status of tickets with automatic sync updates across systems. Using this new integration for Microsoft Sentinel, existing Cyberint and Microsoft Sentinel customers can easily pull logs based on Cyberint's findings into Microsoft Sentinel platform. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+ +**Custom Permissions:** +- **Check Point Cyberint API Key, Argos URL, and Customer Name**: The connector API key, Argos URL, and Customer Name are required + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Checkpoint Cyberint Alerts to Microsoft Sentinel** + +To enable the connector provide the required information below and click on Connect. +> +- **Argos URL**: Argos URL +- **API Token**: (password field) +- **Customer Name**: Customer Name +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/checkpointcyberintioc.md b/Tools/Solutions Analyzer/connector-docs/connectors/checkpointcyberintioc.md index a7d900e3763..6a05f41c226 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/checkpointcyberintioc.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/checkpointcyberintioc.md @@ -10,4 +10,24 @@ This is data connector for Check Point Cyberint IOC. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Check Point Cyberint API Key and Argos URL**: The connector API key and Argos URL are required + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Checkpoint Cyberint Alerts to Microsoft Sentinel** + +To enable the connector provide the required information below and click on Connect. 
+> +- **Argos URL**: Argos URL +- **API key**: API key +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoaci.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoaci.md index 6b73f500913..bdc75913928 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoaci.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoaci.md @@ -10,4 +10,57 @@ [Cisco Application Centric Infrastructure (ACI)](https://www.cisco.com/c/en/us/solutions/collateral/data-center-virtualization/application-centric-infrastructure/solution-overview-c22-741487.html) data connector provides the capability to ingest [Cisco ACI logs](https://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/all/syslog/guide/b_ACI_System_Messages_Guide/m-aci-system-messages-reference.html) into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoACIEvent**](https://aka.ms/sentinel-CiscoACI-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using Cisco ACI Release 1.x + +**1. 
Configure Cisco ACI system sending logs via Syslog to remote server where you will install the agent.** + +[Follow these steps](https://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/1-x/basic-config/b_ACI_Config_Guide/b_ACI_Config_Guide_chapter_010.html#d2933e4611a1635) to configure Syslog Destination, Destination Group, and Syslog Source. + +**2. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server to which the logs will be forwarded. + +> Logs on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**3. Check logs in Microsoft Sentinel** + +Open Log Analytics to check if the logs are received using the Syslog schema. + +>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoasa.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoasa.md index 79bc3871aa4..d5a47aaa65c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoasa.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoasa.md @@ -10,4 +10,65 @@ The Cisco ASA firewall connector allows you to easily connect your Cisco ASA logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Keys** (Workspace): read permissions to shared keys for the workspace. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. 
+ + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Cisco ASA logs to Syslog agent** + +Configure Cisco ASA to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. + +Go to [Send Syslog messages to an external Syslog server](https://aka.ms/asi-syslog-cisco-forwarding), and follow the instructions to set up the connection. Use these parameters when prompted: + +1. Set "port" to 514. +2. Set "syslog_ip" to the IP address of the Syslog agent. + + +[Learn more >](https://aka.ms/CEFCisco) + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoasaama.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoasaama.md index 2da15b6d4c6..8d45549d3cc 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoasaama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoasaama.md @@ -10,4 +10,24 @@ The Cisco ASA firewall connector allows you to easily connect your Cisco ASA logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace data sources** (Workspace): read and write permissions. + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Enable data collection rule​** + +> Cisco ASA/FTD event logs are collected only from **Linux** agents. +- Configure CiscoAsaAma data connector + +- **Create data collection rule** + +**2. 
Run the following command to install and apply the Cisco ASA/FTD collector:** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoduosecurity.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoduosecurity.md index 7ee1e63a98f..11eb6721a05 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoduosecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoduosecurity.md @@ -10,4 +10,72 @@ The Cisco Duo Security data connector provides the capability to ingest [authentication logs](https://duo.com/docs/adminapi#authentication-logs), [administrator logs](https://duo.com/docs/adminapi#administrator-logs), [telephony logs](https://duo.com/docs/adminapi#telephony-logs), [offline enrollment logs](https://duo.com/docs/adminapi#offline-enrollment-logs) and [Trust Monitor events](https://duo.com/docs/adminapi#trust-monitor) into Microsoft Sentinel using the Cisco Duo Admin API. Refer to [API documentation](https://duo.com/docs/adminapi) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Cisco Duo API credentials**: Cisco Duo API credentials with permission *Grant read log* is required for Cisco Duo API. See the [documentation](https://duo.com/docs/adminapi#first-steps) to learn more about creating Cisco Duo API credentials. 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Cisco Duo API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoDuo**](https://aka.ms/sentinel-CiscoDuoSecurity-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Obtaining Cisco Duo Admin API credentials** + +1. Follow [the instructions](https://duo.com/docs/adminapi#first-steps) to obtain **integration key**, **secret key**, and **API hostname**. Use **Grant read log** permission in the 4th step of [the instructions](https://duo.com/docs/adminapi#first-steps). + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CiscoDuoSecurity-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CiscoDuoSecurity-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Cisco Duo Integration Key**, **Cisco Duo Secret Key**, **Cisco Duo API Hostname**, **Cisco Duo Log Types**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the [Azure Function App](https://aka.ms/sentinel-CiscoDuoSecurity-functionapp) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. In the Function App, select the Function App Name and select **Configuration**. 
+2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + CISCO_DUO_INTEGRATION_KEY + CISCO_DUO_SECRET_KEY + CISCO_DUO_API_HOSTNAME + CISCO_DUO_LOG_TYPES + WORKSPACE_ID + SHARED_KEY + logAnalyticsUri (Optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoetd.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoetd.md index 6b6be7a1b31..2e37810fa80 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoetd.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoetd.md @@ -10,4 +10,39 @@ The connector fetches data from ETD api for threat analysis +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Email Threat Defense API, API key, Client ID and Secret**: Ensure you have the API key, Client ID and Secret key. 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the ETD API to pull its logs into Microsoft Sentinel. + +**Follow the deployment steps to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the ETD data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Cisco ETD data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CiscoETD-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Region**. +3. Enter the **WorkspaceID**, **SharedKey**, **ClientID**, **ClientSecret**, **ApiKey**, **Verdicts**, **ETD Region** +4. Click **Create** to deploy. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscofirepowerestreamer.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscofirepowerestreamer.md index 95cea99053c..79ea98cf251 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscofirepowerestreamer.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscofirepowerestreamer.md @@ -10,4 +10,71 @@ eStreamer is a Client Server API designed for the Cisco Firepower NGFW Solution. 
The eStreamer client requests detailed event data on behalf of the SIEM or logging solution in the Common Event Format (CEF). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 25226 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. 
Install the Firepower eNcore client** + +Install and configure the Firepower eNcore eStreamer client, for more details see full install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html) +**2.1 Download the Firepower Connector from github** + + Download the latest version of the Firepower eNcore connector for Microsoft Sentinel [here](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector). If you plan on using python3 use the [python3 eStreamer connector](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector/tree/python3) + + **2.2 Create a pkcs12 file using the Azure/VM Ip Address** + + Create a pkcs12 certificate using the public IP of the VM instance in Firepower under System->Integration->eStreamer, for more information please see install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049443) + + **2.3 Test Connectivity between the Azure/VM Client and the FMC** + + Copy the pkcs12 file from the FMC to the Azure/VM instance and run the test utility (./encore.sh test) to ensure a connection can be established, for more details please see the setup [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049430) + + **2.4 Configure encore to stream data to the agent** + + Configure encore to stream data via TCP to the Microsoft Agent, this should be enabled by default, however, additional ports and streaming protocols can be configured depending on your network security posture, it is also possible to save the data to the file system, for more information please see [Configure Encore](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049433) +**3. 
Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscofirepowerestreamerama.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscofirepowerestreamerama.md index fc447979a42..39aed5b33f1 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscofirepowerestreamerama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscofirepowerestreamerama.md @@ -10,4 +10,72 @@ eStreamer is a Client Server API designed for the Cisco Firepower NGFW Solution. The eStreamer client requests detailed event data on behalf of the SIEM or logging solution in the Common Event Format (CEF). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Install the Firepower eNcore client** + + Install and configure the Firepower eNcore eStreamer client, for more details see full install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html) +**1. Download the Firepower Connector from github** + + Download the latest version of the Firepower eNcore connector for Microsoft Sentinel [here](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector). If you plan on using python3 use the [python3 eStreamer connector](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector/tree/python3) + + **2. 
Create a pkcs12 file using the Azure/VM Ip Address** + + Create a pkcs12 certificate using the public IP of the VM instance in Firepower under System->Integration->eStreamer, for more information please see install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049443) + + **3. Test Connectivity between the Azure/VM Client and the FMC** + + Copy the pkcs12 file from the FMC to the Azure/VM instance and run the test utility (./encore.sh test) to ensure a connection can be established, for more details please see the setup [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049430) + + **4. Configure encore to stream data to the agent** + + Configure encore to stream data via TCP to the Microsoft Agent, this should be enabled by default, however, additional ports and streaming protocols can configured depending on your network security posture, it is also possible to save the data to the file system, for more information please see [Configure Encore](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049433) + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. 
You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoise.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoise.md index 26f27ab9b73..e3a61ac90d1 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoise.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoise.md @@ -10,4 +10,45 @@ The Cisco Identity Services Engine (ISE) data connector provides the capability to ingest [Cisco ISE](https://www.cisco.com/c/en/us/products/security/identity-services-engine/index.html) events into Microsoft Sentinel. It helps you gain visibility into what is happening in your network, such as who is connected, which applications are installed and running, and much more. Refer to [Cisco ISE logging mechanism documentation](https://www.cisco.com/c/en/us/td/docs/security/ise/2-7/admin_guide/b_ise_27_admin_guide/b_ISE_admin_27_maintain_monitor.html#reference_BAFBA5FA046A45938810A5DF04C00591) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
[Follow these steps](https://aka.ms/sentinel-ciscoise-parser) to create the Kusto Functions alias, **CiscoISEEvent** + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. +- **Open Syslog settings** + +**3. Configure Cisco ISE Remote Syslog Collection Locations** + +[Follow these instructions](https://www.cisco.com/c/en/us/td/docs/security/ise/2-7/admin_guide/b_ise_27_admin_guide/b_ISE_admin_27_maintain_monitor.html#ID58) to configure remote syslog collection locations in your Cisco ISE deployment. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscomeraki.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscomeraki.md index f2c916a0b11..0efdd10ac88 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscomeraki.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscomeraki.md @@ -10,4 +10,86 @@ The [Cisco Meraki](https://meraki.cisco.com/) connector allows you to easily connect your Cisco Meraki (MX/MR/MS) logs with Microsoft Sentinel. 
This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **Cisco Meraki**: must be configured to export logs via Syslog + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CiscoMeraki and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Parsers/CiscoMeraki.txt). The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Follow the configuration steps below to get Cisco Meraki device logs into Microsoft Sentinel. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps. 
+ For Cisco Meraki logs, we have issues while parsing the data collected by the OMS agent using default settings. +So we advise capturing the logs into the custom table **meraki_CL** using the instructions below. +1. Login to the server where you have installed OMS agent. +2. Download config file [meraki.conf](https://aka.ms/sentinel-ciscomerakioms-conf) + wget -v https://aka.ms/sentinel-ciscomerakioms-conf -O meraki.conf +3. Copy meraki.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. + cp meraki.conf /etc/opt/microsoft/omsagent/<>/conf/omsagent.d/ +4. Edit meraki.conf as follows: + + a. meraki.conf uses the port **22033** by default. Ensure this port is not being used by any other source on your server + + b. If you would like to change the default port for **meraki.conf** make sure that you don't use default Azure monitoring /log analytic agent ports (for example, CEF uses TCP port **25226** or **25224**) + + c. Replace **workspace_id** with the real value of your Workspace ID (lines 14,15,16,19) +5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command: + sudo /opt/microsoft/omsagent/bin/service_control restart +6. Modify /etc/rsyslog.conf file - add the template below preferably at the beginning / before directives section + $template meraki,"%timestamp% %hostname% %msg%\n" +7. Create a custom conf file in /etc/rsyslog.d/ for example 10-meraki.conf and add the following filter conditions. + + With an added statement you will need to create a filter which will specify the logs coming from the Cisco Meraki to be forwarded to the custom table. + + reference: [Filter Conditions — rsyslog 8.18.0.master documentation](https://rsyslog.readthedocs.io/en/latest/configuration/filters.html) + + Here is an example of filtering that can be defined, this is not complete and will require additional testing for each installation. 
+ if $rawmsg contains "flows" then @@127.0.0.1:22033;meraki + & stop + if $rawmsg contains "firewall" then @@127.0.0.1:22033;meraki + & stop + if $rawmsg contains "urls" then @@127.0.0.1:22033;meraki + & stop + if $rawmsg contains "ids-alerts" then @@127.0.0.1:22033;meraki + & stop + if $rawmsg contains "events" then @@127.0.0.1:22033;meraki + & stop + if $rawmsg contains "ip_flow_start" then @@127.0.0.1:22033;meraki + & stop + if $rawmsg contains "ip_flow_end" then @@127.0.0.1:22033;meraki + & stop +8. Restart rsyslog + systemctl restart rsyslog +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Configure and connect the Cisco Meraki device(s)** + +[Follow these instructions](https://documentation.meraki.com/General_Administration/Monitoring_and_Reporting/Meraki_Device_Reporting_-_Syslog%2C_SNMP_and_API) to configure the Cisco Meraki device(s) to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscomerakimultirule.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscomerakimultirule.md index b3b0a777796..82572b3ba7f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscomerakimultirule.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscomerakimultirule.md @@ -20,4 +20,31 @@ The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily co 3. Audit Event +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Cisco Meraki REST API Key**: Enable API access in Cisco Meraki and generate API Key. Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information. +- **Cisco Meraki Organization Id**: Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Cisco Meraki events to Microsoft Sentinel** + +Currently, this connector allows to ingest events from the following [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) endpoint: + 1. [Get Organization Appliance Security Events](https://developer.cisco.com/meraki/api-latest/#!get-organization-appliance-security-events) +>This connector parses **IDS Alert** events into ASimNetworkSessionLogs Table and **File Scanned** events into ASimWebSessionLogs Table. + 2. [Get Organization Api Requests](https://developer.cisco.com/meraki/api-latest/#!get-organization-api-requests) +>This connector parses events into ASimWebSessionLogs Table. + 3. [Get Organization Configuration Changes](https://developer.cisco.com/meraki/api-latest/#!get-organization-configuration-changes) +>This connector parses events into ASimAuditEventLogs Table. 
+- **Organization Id**: OrganizationId +- **API Key**: (password field) +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscomerakinativepoller.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscomerakinativepoller.md index 3b5a15c103e..196ee39e390 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscomerakinativepoller.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscomerakinativepoller.md @@ -16,4 +16,24 @@ The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily co 1. Network Session +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Cisco Meraki REST API Key**: Enable API access in Cisco Meraki and generate API Key. Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information. +- **Cisco Meraki Organization Id**: Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Cisco Meraki Security Events to Microsoft Sentinel** + +To enable Cisco Meraki Security Events for Microsoft Sentinel, provide the required information below and click on Connect. 
+>This data connector depends on a parser based on a Kusto Function to render the content. [**CiscoMeraki**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/CiscoMeraki/Parsers/CiscoMeraki.txt) Parser currently support only "**IDS Alert**" and "**File Scanned**" Events. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscosdwan.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscosdwan.md index d2ff329ef4a..25ca9907072 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscosdwan.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscosdwan.md @@ -10,4 +10,125 @@ The Cisco Software Defined WAN(SD-WAN) data connector provides the capability to ingest [Cisco SD-WAN](https://www.cisco.com/c/en_in/solutions/enterprise-networks/sd-wan/index.html) Syslog and Netflow data into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**To ingest Cisco SD-WAN Syslog and Netflow data into Microsoft Sentinel follow the steps below.** + +**1. Steps to ingest Syslog data to Microsoft sentinel** + +Azure Monitor Agent will be used to collect the syslog data into Microsoft sentinel. For that first need to create an azure arc server for the VM from which syslog data will be sent. + +**1.1 Steps to Add Azure Arc Server** + +1. In Azure portal, go to Servers - Azure Arc and click on Add. +2. 
Select Generate Script under Add a single server section. A user can also generate scripts for Multiple Servers as well. +3. Review the information on the Prerequisites page, then select Next. +4. On the Resource details page, provide the subscription and resource group of the Microsoft Sentinel, Region, Operating system and Connectivity method. Then select Next. +5. On the Tags page, review the default Physical location tags suggested and enter a value, or specify one or more Custom tags to support your standards. Then select Next. +6. Select Download to save the script file. +7. Now that you have generated the script, the next step is to run it on the server that you want to onboard to Azure Arc. +8. If you have an Azure VM, follow the steps mentioned in the [link](https://learn.microsoft.com/azure/azure-arc/servers/plan-evaluate-on-azure-virtual-machine) before running the script. +9. Run the script by the following command: `./<script_name>.sh` +10. After you install the agent and configure it to connect to Azure Arc-enabled servers, go to the Azure portal to verify that the server has successfully connected. View your machine in the Azure portal. +> **Reference link:** [https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm](https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm) + +**1.2 Steps to Create Data Collection Rule (DCR)** + +1. In Azure Portal search for Monitor. Under Settings, select Data Collection Rules and Select Create. +2. On the Basics panel, enter the Rule Name, Subscription, Resource group, Region and Platform Type. +3. Select Next: Resources. +4. Select Add resources. Use the filters to find the virtual machine that you'll use to collect logs. +5. Select the virtual machine. Select Apply. +6. Select Next: Collect and deliver. +7. Select Add data source. For Data source type, select Linux syslog. +8. For Minimum log level, leave the default values LOG_DEBUG. +9. Select Next: Destination. +10. 
Select Add destination and add Destination type, Subscription and Account or namespace. +11. Select Add data source. Select Next: Review + create. +12. Select Create. Wait for 20 minutes. In Microsoft Sentinel or Azure Monitor, verify that the Azure Monitor agent is running on your VM. +> **Reference link:** [https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent](https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent) + +**2. Steps to ingest Netflow data to Microsoft sentinel** + +To Ingest Netflow data into Microsoft sentinel, Filebeat and Logstash needs to be installed and configured on the VM. After the configuration, vm will be able to receive netflow data on the configured port and that data will be ingested into the workspace of Microsoft sentinel. + +**2.1 Install filebeat and logstash** + +1. For the installation of filebeat and logstash using apt refer to this doc: + 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html). + 2. Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html). +2. For the installation of filebeat and logstash for RedHat based Linux (yum) steps are as follows: + 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum). + 2. Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum) + +**2.2 Configure Filebeat to send events to Logstash** + +1. Edit filebeat.yml file: `vi /etc/filebeat/filebeat.yml` +2. Comment out the Elasticsearch Output section. +3. 
Uncomment Logstash Output section (Uncomment only these two lines): + output.logstash + hosts: ["localhost:5044"] +4. In the Logstash Output section, if you want to send the data other than the default port i.e. 5044 port, then replace the port number in the hosts field. (Note: This port should be added in the conf file, while configuring logstash.) +5. In the 'filebeat.inputs' section comment out existing configuration and add the following configuration: + - type: netflow + max_message_size: 10KiB + host: "0.0.0.0:2055" + protocols: [ v5, v9, ipfix ] + expiration_timeout: 30m + queue_size: 8192 + custom_definitions: + - /etc/filebeat/custom.yml + detect_sequence_reset: true + enabled: true +6. In the Filebeat inputs section, if you want to receive the data other than the default port i.e. 2055 port, then replace the port number in the host field. +7. Add the provided [custom.yml](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/custom.yml) file inside the /etc/filebeat/ directory. +8. Open the filebeat input and output port in the firewall. + 1. Run command: `firewall-cmd --zone=public --permanent --add-port=2055/udp` + 2. Run command: `firewall-cmd --zone=public --permanent --add-port=5044/udp` +> Note: If a custom port is added for filebeat input/output, then open that port in the firewall. + +**2.3 Configure Logstash to send events to Microsoft Sentinel** + +1. Install the Azure Log Analytics plugin: + 1. Run Command: `sudo /usr/share/logstash/bin/logstash-plugin install microsoft-logstash-output-azure-loganalytics` +3. Store the Log Analytics workspace key in the Logstash key store. The workspace key can be found in Azure Portal under Log analytic workspace > Select workspace > Under Settings select Agent > Log Analytics agent instructions. +4. Copy the Primary key and run the following commands: + 1. 
`sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash create LogAnalyticsKey` + 2. `sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash add LogAnalyticsKey` +5. Create the configuration file /etc/logstash/cisco-netflow-to-sentinel.conf: + input { + beats { + port => #(Enter output port number which has been configured during filebeat configuration i.e. filebeat.yml file .) + } + } + output { + microsoft-logstash-output-azure-loganalytics { + workspace_id => "" + workspace_key => "${LogAnalyticsKey}" + custom_log_table_name => "CiscoSDWANNetflow" + } + } +> Note: If table is not present in Microsoft sentinel, then it will create a new table in sentinel. + +**2.4 Run Filebeat:** + +1. Open a terminal and run the command: +> `systemctl start filebeat` +2. This command will start running filebeat in the background. To see the logs stop the filebeat (`systemctl stop filebeat`) then run the following command: +> `filebeat run -e` + +**2.5 Run Logstash:** + +1. In another terminal run the command: +> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf &` +2. This command will start running the logstash in the background. To see the logs of logstash kill the above process and run the following command : +> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf` + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscosecureendpoint.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscosecureendpoint.md index 67f7fd51a0a..e3e3b62b3da 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscosecureendpoint.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscosecureendpoint.md @@ -14,4 +14,93 @@ The Cisco Secure Endpoint (formerly AMP for Endpoints) data connector provides t

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Cisco Secure Endpoint API credentials**: Cisco Secure Endpoint Client ID and API Key are required. [See the documentation to learn more about Cisco Secure Endpoint API](https://api-docs.amp.cisco.com/api_resources?api_host=api.amp.cisco.com&api_version=v1). [API domain](https://api-docs.amp.cisco.com) must be provided as well. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Cisco Secure Endpoint API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. 
+ +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoSecureEndpoint**](https://aka.ms/sentinel-ciscosecureendpoint-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Obtaining Cisco Secure Endpoint API credentials** + +1. Follow the instructions in the [documentation](https://api-docs.amp.cisco.com/api_resources?api_host=api.amp.cisco.com&api_version=v1) to generate Client ID and API Key. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ciscosecureendpoint-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Cisco Secure Endpoint Api Host**, **Cisco Secure Endpoint Client Id**, **Cisco Secure Endpoint Api Key**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. 
Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-ciscosecureendpoint-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. 
In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + CISCO_SE_API_API_HOST + CISCO_SE_API_CLIENT_ID + CISCO_SE_API_KEY + WORKSPACE_ID + SHARED_KEY + logAnalyticsUri (Optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscosecureendpointlogsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscosecureendpointlogsccpdefinition.md index 3502e51cd16..5b11cb5cf61 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscosecureendpointlogsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscosecureendpointlogsccpdefinition.md @@ -10,4 +10,49 @@ The Cisco Secure Endpoint (formerly AMP for Endpoints) data connector provides the capability to ingest Cisco Secure Endpoint [audit logs](https://developer.cisco.com/docs/secure-endpoint/auditlog/) and [events](https://developer.cisco.com/docs/secure-endpoint/v1-api-reference-event/) into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Cisco Secure Endpoint API Credentials/Regions**: To create API Credentials and to understand the regions, follow the document link provided here. [Click here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/README.md). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Cisco Secure Endpoint to Microsoft Sentinel** + +To ingest data from Cisco Secure Endpoint to Microsoft Sentinel, you have to click on Add Account button below, then you get a pop up to fill the details like Email, Organization, Client ID, API Key and Region, provide the required information and click on Connect. You can see the connected organizations/emails in the below grid. +> +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Organization** +- **Email** +- **Endpoint** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add Account** + +*Add Account* + +When you click the "Add Account" button in the portal, a configuration form will open. 
You'll need to provide: + +- **Cisco Secure Endpoint Email** (optional): Enter your Cisco Email +- **Cisco Secure Endpoint Organization** (optional): Enter the name of your Organization +- **Cisco Secure Endpoint Client ID** (optional): Enter your Client ID +- **Cisco Secure Endpoint API Key** (optional): Enter your API Key +- **Cisco Secure Endpoint Region** (optional): Enter the region you want to connect + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoseg.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoseg.md index b4c0171aa0a..5a3b9097a61 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoseg.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoseg.md @@ -10,4 +10,65 @@ The [Cisco Secure Email Gateway (SEG)](https://www.cisco.com/c/en/us/products/security/email-security/index.html) data connector provides the capability to ingest [Cisco SEG Consolidated Event Logs](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1061902) into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoSEGEvent**](https://aka.ms/sentinel-CiscoSEG-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using AsyncOS 14.0 for Cisco Secure Email Gateway + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Follow these steps to configure Cisco Secure Email Gateway to forward logs via syslog: + +2.1. Configure [Log Subscription](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1134718) + +>**NOTE:** Select **Consolidated Event Logs** in Log Type field. + +**3. 
Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscosegama.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscosegama.md index 99f0f98455f..af7d76497e7 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscosegama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscosegama.md @@ -10,4 +10,63 @@ The [Cisco Secure Email Gateway (SEG)](https://www.cisco.com/c/en/us/products/security/email-security/index.html) data connector provides the capability to ingest [Cisco SEG Consolidated Event Logs](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1061902) into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoSEGEvent**](https://aka.ms/sentinel-CiscoSEG-parser) which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Follow these steps to configure Cisco Secure Email Gateway to forward logs via syslog: + + Configure [Log Subscription](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1134718) + +>**NOTE:** Select **Consolidated Event Logs** in Log Type field. + + **Step C. 
Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoucs.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoucs.md index f804508346d..21316fe5065 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoucs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoucs.md @@ -10,4 +10,47 @@ The [Cisco Unified Computing System (UCS)](https://www.cisco.com/c/en/us/products/servers-unified-computing/index.html) connector allows you to easily connect your Cisco UCS logs with Microsoft Sentinel This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required.
+ +**Custom Permissions:** +- **Cisco UCS**: must be configured to export logs via Syslog + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CiscoUCS and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20UCS/Parsers/CiscoUCS.yaml). The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. 
Configure and connect the Cisco UCS** + +[Follow these instructions](https://www.cisco.com/c/en/us/support/docs/servers-unified-computing/ucs-manager/110265-setup-syslog-for-ucs.html#configsremotesyslog) to configure the Cisco UCS to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoumbrelladataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoumbrelladataconnector.md index 1a770d96e88..33ce89b8b90 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoumbrelladataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoumbrelladataconnector.md @@ -10,4 +10,76 @@ The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Amazon S3 REST API Credentials/permissions**: **AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning) + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App. + +>**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**. + +**STEP 1 - Configuration of the Cisco Cloud Security logs collection** + +[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials. 
 + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions** + +>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey** +**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development. + +1. 
Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + WorkspaceID + WorkspaceKey + S3Bucket + AWSAccessKeyId + AWSSecretAccessKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoumbrelladataconnectorelasticpremium.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoumbrelladataconnectorelasticpremium.md index fb5812a8b19..74a0ec1b36b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscoumbrelladataconnectorelasticpremium.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscoumbrelladataconnectorelasticpremium.md @@ -14,4 +14,88 @@ The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbre **NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Amazon S3 REST API Credentials/permissions**: **AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API. +- **Virtual Network permissions (for private access)**: For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. 
The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning) + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App. + +>**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**. 
+ +**STEP 1 - Network Prerequisites for Private Access** + +>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met: +> - **Virtual Network**: An existing Virtual Network (VNet) must be available +> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration +> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI: +> - Azure Portal: Go to Virtual networks → Select your VNet → Subnets → Select subnet → Delegate subnet to service → Choose **Microsoft.Web/serverFarms** +> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms` +> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet + +**STEP 2 - Configuration of the Cisco Umbrella logs collection** + +[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials. + +**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions** + +>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey** +4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms) +**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value +5. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +6. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development. + +1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer. +2. 
Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + WorkspaceID + WorkspaceKey + S3Bucket + AWSAccessKeyId + AWSSecretAccessKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ciscowsa.md b/Tools/Solutions Analyzer/connector-docs/connectors/ciscowsa.md index 90b5588b70a..9959e5abddf 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ciscowsa.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ciscowsa.md @@ -10,4 +10,59 @@ [Cisco Web Security Appliance (WSA)](https://www.cisco.com/c/en/us/products/security/web-security-appliance/index.html) data connector provides the capability to ingest [Cisco WSA Access Logs](https://www.cisco.com/c/en/us/td/docs/security/wsa/wsa_14-0/User-Guide/b_WSA_UserGuide_14_0/b_WSA_UserGuide_11_7_chapter_010101.html) into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required.
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoWSAEvent**](https://aka.ms/sentinel-CiscoWSA-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using AsyncOS 14.0 for Cisco Web Security Appliance + +**1. Configure Cisco Web Security Appliance to forward logs via Syslog to remote server where you will install the agent.** + +[Follow these steps](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1134718) to configure Cisco Web Security Appliance to forward logs via Syslog + +>**NOTE:** Select **Syslog Push** as a Retrieval Method. + +**2. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server to which the logs will be forwarded. + +> Logs on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. 
+ - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**3. Check logs in Microsoft Sentinel** + +Open Log Analytics to check if the logs are received using the Syslog schema. + +>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/citrix.md b/Tools/Solutions Analyzer/connector-docs/connectors/citrix.md index 97d43fd15e9..9afbdffd37e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/citrix.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/citrix.md @@ -10,4 +10,23 @@ Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Licensing**: Entitlements to Citrix Security Analytics in Citrix Cloud. 
Please review [Citrix Tool License Agreement.](https://aka.ms/sentinel-citrixanalyticslicense-readme) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +To get access to this capability and the configuration steps on Citrix Analytics, please visit: [Connect Citrix to Microsoft Sentinel.](https://aka.ms/Sentinel-Citrix-Connector)​ +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/citrixadc.md b/Tools/Solutions Analyzer/connector-docs/connectors/citrixadc.md index 32f11b015e3..3e018917bb2 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/citrixadc.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/citrixadc.md @@ -10,4 +10,68 @@ The [Citrix ADC (former NetScaler)](https://www.citrix.com/products/citrix-adc/) data connector provides the capability to ingest Citrix ADC logs into Microsoft Sentinel. If you want to ingest Citrix WAF logs into Microsoft Sentinel, refer this [documentation](https://learn.microsoft.com/azure/sentinel/data-connectors/citrix-waf-web-app-firewall) +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** 1. 
This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CitrixADCEvent and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20ADC/Parsers/CitrixADCEvent.yaml), this function maps Citrix ADC (former NetScaler) events to Advanced Security Information Model [ASIM](https://docs.microsoft.com/azure/sentinel/normalization). The function usually takes 10-15 minutes to activate after solution installation/update. + +>**NOTE:** 2. This parser requires a watchlist named **`Sources_by_SourceType`** + +> i. If you don't have watchlist already created, please click [here](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FASIM%2Fdeploy%2FWatchlists%2FASimSourceType.json) to create. + +> ii. Open watchlist **`Sources_by_SourceType`** and add entries for this data source. + +> iii. The SourceType value for CitrixADC is **`CitrixADC`**. + +> You can refer [this](https://learn.microsoft.com/en-us/azure/sentinel/normalization-manage-parsers?WT.mc_id=Portal-fx#configure-the-sources-relevant-to-a-source-specific-parser) documentation for more details + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. 
+ - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure Citrix ADC to forward logs via Syslog** + +3.1 Navigate to **Configuration tab > System > Auditing > Syslog > Servers tab** + + 3.2 Specify **Syslog action name**. + + 3.3 Set IP address of remote Syslog server and port. + + 3.4 Set **Transport type** as **TCP** or **UDP** depending on your remote Syslog server configuration. + + 3.5 You can refer Citrix ADC (former NetScaler) [documentation](https://docs.netscaler.com/) for more details. + +**4. Check logs in Microsoft Sentinel** + +Open Log Analytics to check if the logs are received using the Syslog schema. + +>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/citrixwaf.md b/Tools/Solutions Analyzer/connector-docs/connectors/citrixwaf.md index c44fe804b7e..c198696a7c8 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/citrixwaf.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/citrixwaf.md @@ -14,4 +14,63 @@ Citrix WAF supports Common Event Format (CEF) which is an industry standard format on top of Syslog messages . By connecting Citrix WAF CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Configure Citrix WAF to send Syslog messages in CEF format to the proxy machine using the steps below. + +1. Follow [this guide](https://support.citrix.com/article/CTX234174) to configure WAF. + +2. Follow [this guide](https://support.citrix.com/article/CTX136146) to configure CEF logs. + +3. 
Follow [this guide](https://docs.citrix.com/en-us/citrix-adc/13/system/audit-logging/configuring-audit-logging.html) to forward the logs to the proxy. Make sure to send the logs to port 514 TCP on the Linux machine's IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/citrixwafama.md b/Tools/Solutions Analyzer/connector-docs/connectors/citrixwafama.md index ce8e0e43f75..9ea112bc9fa 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/citrixwafama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/citrixwafama.md @@ -14,4 +14,67 @@ Citrix WAF supports Common Event Format (CEF) which is an industry standard format on top of Syslog messages . By connecting Citrix WAF CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Configure Citrix WAF to send Syslog messages in CEF format to the proxy machine using the steps below. + +1. Follow [this guide](https://support.citrix.com/article/CTX234174) to configure WAF. + +2. Follow [this guide](https://support.citrix.com/article/CTX136146) to configure CEF logs. 
+ +3. Follow [this guide](https://docs.citrix.com/en-us/citrix-adc/13/system/audit-logging/configuring-audit-logging.html) to forward the logs to proxy . Make sure you to send the logs to port 514 TCP on the Linux machine's IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/claroty.md b/Tools/Solutions Analyzer/connector-docs/connectors/claroty.md index 050e7ae74b1..dd6be309b5f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/claroty.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/claroty.md @@ -10,4 +10,67 @@ The [Claroty](https://claroty.com/) data connector provides the capability to ingest [Continuous Threat Detection](https://claroty.com/resources/datasheets/continuous-threat-detection) and [Secure Remote Access](https://claroty.com/industrial-cybersecurity/sra) events into Microsoft Sentinel. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ClarotyEvent**](https://aka.ms/sentinel-claroty-parser) which is deployed with the Microsoft Sentinel Solution. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. 
+ - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Configure Claroty to send logs using CEF** + +Configure log forwarding using CEF: + +1. Navigate to the **Syslog** section of the Configuration menu. + +2. Select **+Add**. + +3. In the **Add New Syslog Dialog** specify Remote Server **IP**, **Port**, **Protocol** and select **Message Format** - **CEF**. + +4. Choose **Save** to exit the **Add Syslog dialog**. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/clarotyama.md b/Tools/Solutions Analyzer/connector-docs/connectors/clarotyama.md index 0f1651dc31a..c5f3aa44c0f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/clarotyama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/clarotyama.md @@ -10,4 +10,67 @@ The [Claroty](https://claroty.com/) data connector provides the capability to ingest [Continuous Threat Detection](https://claroty.com/resources/datasheets/continuous-threat-detection) and [Secure Remote Access](https://claroty.com/industrial-cybersecurity/sra) events into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ClarotyEvent**](https://aka.ms/sentinel-claroty-parser) which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. 
Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Configure Claroty to send logs using CEF** + + Configure log forwarding using CEF: + +1. Navigate to the **Syslog** section of the Configuration menu. + +2. Select **+Add**. + +3. In the **Add New Syslog Dialog** specify Remote Server **IP**, **Port**, **Protocol** and select **Message Format** - **CEF**. + +4. Choose **Save** to exit the **Add Syslog dialog**. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/clarotyxdome.md b/Tools/Solutions Analyzer/connector-docs/connectors/clarotyxdome.md index 0c22029e42d..e584f2f1adb 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/clarotyxdome.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/clarotyxdome.md @@ -10,4 +10,57 @@ [Claroty](https://claroty.com/) xDome delivers comprehensive security and alert management capabilities for healthcare and industrial network environments. It is designed to map multiple source types, identify the collected data, and integrate it into Microsoft Sentinel data models. This results in the ability to monitor all potential threats in your healthcare and industrial environments in one location, leading to more effective security monitoring and a stronger security posture. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. 
+ +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Configure the Claroty xDome - Microsoft Sentinel integration to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. 
You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cloudflaredataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/cloudflaredataconnector.md index 03a1bd16e0e..c3fb619b11f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cloudflaredataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cloudflaredataconnector.md @@ -14,4 +14,92 @@ The Cloudflare data connector provides the capability to ingest [Cloudflare logs

 NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Azure Blob Storage connection string and container name**: Azure Blob Storage connection string and container name where the logs are pushed to by Cloudflare Logpush. [See the documentation to learn more about creating Azure Blob Storage container.](https://learn.microsoft.com/azure/storage/blobs/storage-quickstart-blobs-portal) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Cloudflare**](https://aka.ms/sentinel-CloudflareDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuration of the Cloudflare Logpush** + +See documentation to [setup Cloudflare Logpush to Microsoft Azure](https://developers.cloudflare.com/logs/logpush/logpush-dashboard) + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Cloudflare data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Cloudflare data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CloudflareDataConnector-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Azure Blob Storage Container Name**, **Azure Blob Storage Connection String**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. 
Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Cloudflare data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-CloudflareDataConnector-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CloudflareXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. 
Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + CONTAINER_NAME + AZURE_STORAGE_CONNECTION_STRING + WORKSPACE_ID + SHARED_KEY + logAnalyticsUri (Optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cloudflaredefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/cloudflaredefinition.md index 07b2a535bbe..3749706c201 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cloudflaredefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cloudflaredefinition.md @@ -10,4 +10,30 @@ The Cloudflare data connector provides the capability to ingest Cloudflare logs into Microsoft Sentinel using the Cloudflare Logpush and Azure Blob Storage. Refer to [Cloudflare documentation](https://developers.cloudflare.com/logs/about/)for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Create a storage account and a container**: Before setting up logpush in Cloudflare, first create a storage account and a container in Microsoft Azure. Use [this guide](https://learn.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction) to know more about Container and Blob. 
Follow the steps in the [documentation](https://learn.microsoft.com/en-us/azure/storage/common/storage-account-create?tabs=azure-portal) to create an Azure Storage account. +- **Generate a Blob SAS URL**: Create and Write permissions are required. Refer the [documentation](https://learn.microsoft.com/en-us/azure/ai-services/translator/document-translation/how-to-guides/create-sas-tokens?tabs=Containers) to know more about Blob SAS token and url. +- **Collecting logs from Cloudflare to your Blob container**: Follow the steps in the [documentation](https://developers.cloudflare.com/logs/get-started/enable-destinations/azure/) for collecting logs from Cloudflare to your Blob container. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Cloudflare Logs to Microsoft Sentinel** + +To enable Cloudflare logs for Microsoft Sentinel, provide the required information below and click on Connect. +> +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `ServicePrincipalIDTextBox_test`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. +- **The Blob container's URL you want to collect data from** +- **The Blob container's storage account resource group name** +- **The Blob container's storage account location** +- **The Blob container's storage account subscription id** +- **The event grid topic name of the blob container's storage account if exist. 
else keep empty.** +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cloudguardccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/cloudguardccpdefinition.md index b3220fd3411..d9022cba5df 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cloudguardccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cloudguardccpdefinition.md @@ -10,4 +10,26 @@ The [CloudGuard](https://sc1.checkpoint.com/documents/CloudGuard_Dome9/Documentation/Overview/CloudGuard-CSPM-Introduction.htm?cshid=help_center_documentation) data connector enables the ingestion of security events from the CloudGuard API into Microsoft Sentinel™, using Microsoft Sentinel’s Codeless Connector Platform. The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) which parses incoming security event data into custom columns. This pre-parsing process eliminates the need for query-time parsing, resulting in improved performance for data queries. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **CloudGuard API Key**: Refer to the instructions provided [here](https://sc1.checkpoint.com/documents/CloudGuard_Dome9/Documentation/Settings/Users-Roles.htm#add_service) to generate an API key. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect CloudGuard Security Events to Microsoft Sentinel** + +To enable the CloudGuard connector for Microsoft Sentinel, enter the required information below and select Connect. 
+> +- **API Key ID**: api_key +- **API Key Secret**: (password field) +- **CloudGuard Endpoint URL**: e.g. https://api.dome9.com +- **Filter**: Paste filter from CloudGuard +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cofenseintelligence.md b/Tools/Solutions Analyzer/connector-docs/connectors/cofenseintelligence.md index aab5448db4c..f218af953ad 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cofenseintelligence.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cofenseintelligence.md @@ -48,4 +48,178 @@ The [Cofense-Intelligence](https://cofense.com/product-services/phishing-intelli > https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Cofense Username** and **Password** is required. 
See the documentation to learn more about API on the [Rest API reference](https://www.threathq.com/docs/rest_api_reference.html) +- **Microsoft Defender for Endpoints**: **Microsoft Defender for Endpoints License** is required for SentinelToDefender function. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Cofense Intelligence APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Azure Active Directory**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. 
When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of CofenseIntelligence Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application** + + Sometimes called an application password, a client secret is a string value required for the execution of CofenseIntelligence Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseIntelligence Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application** + + Follow the steps in this section to assign the role: + 1. In the Azure portal, Go to **Resource Group** and select your resource group. + 2. Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. Select **Contributor** as role and click on next. + 5. 
In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application** + + Follow the steps in this section to assign the permissions: + 1. In the Azure portal, in **App registrations**, select **your application**. + 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**. + 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**. + 4. Select **Grant consent**. + +> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide) + +**STEP 5 - Steps to create/get Credentials for the Cofense Intelligence account** + + Follow the steps in this section to create/get **Cofense Username** and **Password**: + 1. Login to https://threathq.com and go to the **Settings menu** on the left navigation bar. + 2. Choose the API Tokens tab and select **Add a New Token** + 3. Make sure to save the **password**, as it will not be accessible again. 
+ +**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Cofense Intelligence Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**7. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Cofense connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseIntelligence-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Function Name + Workspace ID + Workspace Key + Cofense BaseURL (https:///) + Cofense Username + Cofense Password + Azure Client ID + Azure Client Secret + Azure Tenant ID + Azure Resource Group Name + Azure Workspace Name + Azure Subscription ID + RequireProxy + Proxy Username (optional) + Proxy Password (optional) + Proxy URL (optional) + Proxy Port (optional) + LogLevel (optional) + Malware_Data_Table_name + SendCofenseIndicatorToDefender + Schedule +4. Click on **Review+Create**. +5. Then after validation click on **Create** to deploy. + +**8. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Cofense Intelligence Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. 
Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseIntelligence-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX). + + e. **Select a runtime:** Choose Python 3.11 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. 
Add each of the following application settings individually, with their respective values (case-sensitive): + Workspace ID + Workspace Key + Cofense BaseURL (https:///) + Cofense Username + Cofense Password + Azure Client ID + Azure Client Secret + Azure Tenant ID + Azure Resource Group Name + Azure Workspace Name + Function App Name + Azure Subscription ID + RequireProxy + Proxy Username (optional) + Proxy Password (optional) + Proxy URL (optional) + Proxy Port (optional) + LogLevel (optional) + Malware_Data_Table_name + SendCofenseIndicatorToDefender + Schedule +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cofensetriage.md b/Tools/Solutions Analyzer/connector-docs/connectors/cofensetriage.md index c0dbbb795dc..68f7cf4a35b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cofensetriage.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cofensetriage.md @@ -46,4 +46,179 @@ The [Cofense-Triage](https://cofense.com/product-services/cofense-triage/) data > https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Cofense Client ID** and **Client Secret** is required. See the documentation to learn more about API on the `https:///docs/api/v2/index.html` +- **Microsoft Defender for Endpoints**: **Microsoft Defender for Endpoints License** is required for IndicatorCreatorToDefender function. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Cofense APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and pulls Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. 
Search for and select **Azure Active Directory**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of CofenseTriage Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application** + + Sometimes called an application password, a client secret is a string value required for the execution of CofenseTriage Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseTriage Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application** + + Follow the steps in this section to assign the role: + 1. 
In the Azure portal, Go to **Resource Group** and select your resource group. + 2. Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. Select **Contributor** as role and click on next. + 5. In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application** + + Follow the steps in this section to assign the permissions: + 1. In the Azure portal, in **App registrations**, select **your application**. + 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**. + 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**. + 4. Select **Grant consent**. + +> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide) + +**STEP 5 - Steps to create/get Credentials for the Cofense Triage account** + + Follow the steps in this section to create/get **Cofense Client ID** and **Client Secret**: + 1. Go to **Administration > API Management > Version 2 tab > Applications** + 2. Click on **New Application** + 3. Add the required information and click on **submit**. 
+ +**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Cofense Triage Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**7. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Cofense connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseTriage-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Function Name + Workspace ID + Workspace Key + Cofense URL (https:///) + Cofense Client ID + Cofense Client Secret + Azure Client ID + Azure Client Secret + Azure Tenant ID + Azure Resource Group Name + Azure Workspace Name + Azure Subscription ID + Threat Level + Proxy Username (optional) + Proxy Password (optional) + Proxy URL (optional) + Proxy Port (optional) + Throttle Limit for Non-Cofense Indicators (optional) + LogLevel (optional) + Reports Table Name + Schedule +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**8. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Cofense Triage Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. 
Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseThreatIndicatorsAPI-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX). + + e. **Select a runtime:** Choose Python 3.11 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. 
Add each of the following application settings individually, with their respective values (case-sensitive): + Workspace ID + Workspace Key + Cofense URL (https:///) + Cofense Client ID + Cofense Client Secret + Azure Client ID + Azure Client Secret + Azure Tenant ID + Azure Resource Group Name + Azure Workspace Name + Azure Subscription ID + Threat Level + Proxy Username (optional) + Proxy Password (optional) + Proxy URL (optional) + Proxy Port (optional) + Throttle Limit for Non-Cofense Indicators (optional) + LogLevel (optional) + Reports Table Name + Schedule + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cognnisentineldataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/cognnisentineldataconnector.md index 98983142e79..7ee34d43c57 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cognnisentineldataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cognnisentineldataconnector.md @@ -10,4 +10,28 @@ The Cognni connector offers a quick and simple integration with Microsoft Sentinel. You can use Cognni to autonomously map your previously unclassified important information and detect related incidents. This allows you to recognize risks to your important information, understand the severity of the incidents, and investigate the details you need to remediate, fast enough to make a difference. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect to Cognni** + +1. Go to [Cognni integrations page](https://intelligence.cognni.ai/integrations) +2. Click **'Connect'** on the 'Microsoft Sentinel' box +3. Copy and paste **'workspaceId'** and **'sharedKey'** (from below) to the related fields on Cognni's integrations screen +4. Click the **'Connect'** button to complete the configuration. + Soon, all your Cognni-detected incidents will be forwarded here (into Microsoft Sentinel) + +Not a Cognni user? [Join us](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/shieldox.appsource_freetrial) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Shared Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cognyteluminar.md b/Tools/Solutions Analyzer/connector-docs/connectors/cognyteluminar.md index 85b056d94b3..2aae308a411 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cognyteluminar.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cognyteluminar.md @@ -10,4 +10,85 @@ Luminar IOCs and Leaked Credentials connector allows integration of intelligence-based IOC data and customer-related leaked records identified by Luminar. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Luminar Client ID**, **Luminar Client Secret** and **Luminar Account ID** are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Cognyte Luminar API to pull Luminar IOCs and Leaked Credentials into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**1. Option 1 - Azure Resource Manager (ARM) Template for Flex Consumption Plan** + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CognyteLuminar-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Application ID**, **Tenant ID**,**Client Secret**, **Luminar API Client ID**, **Luminar API Account ID**, **Luminar API Client Secret**, **Luminar Initial Fetch Date**, **TimeInterval** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**2. Option 1 - Azure Resource Manager (ARM) Template for Premium Plan** + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CognyteLuminar-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Application ID**, **Tenant ID**,**Client Secret**, **Luminar API Client ID**, **Luminar API Account ID**, **Luminar API Client Secret**, **Luminar Initial Fetch Date**, **TimeInterval** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**3. 
Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Cognyte Luminar data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-CognyteLuminar-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CognyteLuminarXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. 
Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\tApplication ID\n\tTenant ID\n\tClient Secret\n\tLuminar API Client ID\n\tLuminar API Account ID\n\tLuminar API Client Secret\n\tLuminar Initial Fetch Date\n\tTimeInterval - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n3. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cohesitydataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/cohesitydataconnector.md index 889f0716798..9006a0b3a44 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cohesitydataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cohesitydataconnector.md @@ -10,4 +10,40 @@ The Cohesity function apps provide the ability to ingest Cohesity Datahawk ransomware alerts into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Azure Blob Storage connection string and container name**: Azure Blob Storage connection string and container name + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions that connect to the Azure Blob Storage and KeyVault. This might result in additional costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/), [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) and [Azure KeyVault pricing page](https://azure.microsoft.com/pricing/details/key-vault/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Get a Cohesity DataHawk API key (see troubleshooting [instruction 1](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/CohesitySecurity/Data%20Connectors/Helios2Sentinel/IncidentProducer))** + +**STEP 2 - Register Azure app ([link](https://portal.azure.com/#view/Microsoft_AAD_IAM/ActiveDirectoryMenuBlade/~/RegisteredApps)) and save Application (client) ID, Directory (tenant) ID, and Secret Value ([instructions](https://learn.microsoft.com/en-us/azure/healthcare-apis/register-application)). Grant it Azure Storage (user_impersonation) permission. 
Also, assign the 'Microsoft Sentinel Contributor' role to the application in the appropriate subscription.** + +**STEP 3 - Deploy the connector and the associated Azure Functions**. + +**4. Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Cohesity data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Cohesity-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the parameters that you created at the previous steps +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/commvaultsecurityiq-cl.md b/Tools/Solutions Analyzer/connector-docs/connectors/commvaultsecurityiq-cl.md index ffb1d02b2ed..6aeefdd6e89 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/commvaultsecurityiq-cl.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/commvaultsecurityiq-cl.md @@ -10,4 +10,46 @@ This Azure Function enables Commvault users to ingest alerts/events into their Microsoft Sentinel instance. With Analytic Rules,Microsoft Sentinel can automatically create Microsoft Sentinel incidents from incoming events and logs. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).
+- **Commvault Environment Endpoint URL**: Make sure to follow the documentation and set the secret value in KeyVault
+- **Commvault QSDK Token**: Make sure to follow the documentation and set the secret value in KeyVault
+
+## Setup Instructions
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+>**NOTE:** This connector uses Azure Functions to connect to a Commvault Instance to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.
+
+>Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.
+
+**STEP 1 - Configuration steps for the Commvault QSDK Token**
+
+[Follow these instructions](https://documentation.commvault.com/2024e/essential/creating_access_token.html) to create an API Token.
+
+**STEP 2 - Deploy the connector and the associated Azure Function**
+
+>**IMPORTANT:** Before deploying the CommvaultSecurityIQ data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Commvault Endpoint URL and QSDK Token, readily available. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Commvault Security IQ data connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CommvaultSecurityIQ-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Region**. +3. Enter the **Workspace ID**, **Workspace Key** 'and/or Other required fields' and click Next. +4. Click **Create** to deploy. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/confluenceauditapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/confluenceauditapi.md index 369fd4460c7..30a8bc19a3b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/confluenceauditapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/confluenceauditapi.md @@ -10,4 +10,92 @@ The [Atlassian Confluence](https://www.atlassian.com/software/confluence) Audit data connector provides the capability to ingest [Confluence Audit Records](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **ConfluenceAccessToken**, **ConfluenceUsername** is required for REST API. [See the documentation to learn more about API](https://developer.atlassian.com/cloud/confluence/rest/api-group-audit/). Check all [requirements and follow the instructions](https://developer.atlassian.com/cloud/confluence/rest/intro/#auth) for obtaining credentials. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Confluence REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. 
+
+**STEP 1 - Configuration steps for the Confluence API**
+
+ [Follow the instructions](https://developer.atlassian.com/cloud/confluence/rest/intro/#auth) to obtain the credentials.
+
+**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**
+
+>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following).
+- **Workspace ID**: `WorkspaceId`
+ > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+- **Primary Key**: `PrimaryKey`
+ > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+
+**3. Option 1 - Azure Resource Manager (ARM) Template**
+
+Use this method for automated deployment of the Confluence Audit data connector using an ARM Template.
+
+1. Click the **Deploy to Azure** button below.
+
+	[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-confluenceaudit-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-confluenceaudit-azuredeploy-gov)
+2. Select the preferred **Subscription**, **Resource Group** and **Location**.
+> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.
+3. Enter the **ConfluenceAccessToken**, **ConfluenceUsername**, **ConfluenceHomeSiteName** (short site name part, as example HOMESITENAME from https://community.atlassian.com) and deploy.
+4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.
+5. Click **Purchase** to deploy.
+
+**4. 
Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Confluence Audit data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-confluenceauditapi-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ConflAuditXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. 
Configure the Function App**
+
+1. In the Function App, select the Function App Name and select **Configuration**.
+2. In the **Application settings** tab, select **+ New application setting**.
+3. Add each of the following application settings individually, with their respective string values (case-sensitive): 
+	ConfluenceUsername
+	ConfluenceAccessToken
+	ConfluenceHomeSiteName
+	WorkspaceID
+	WorkspaceKey
+	logAnalyticsUri (optional)
+> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.
+4. Once all application settings have been entered, click **Save**.
+
 [← Back to Connectors Index](../connectors-index.md)
diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/confluenceauditccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/confluenceauditccpdefinition.md
index c4f0f9a5642..db95586b584 100644
--- a/Tools/Solutions Analyzer/connector-docs/connectors/confluenceauditccpdefinition.md
+++ b/Tools/Solutions Analyzer/connector-docs/connectors/confluenceauditccpdefinition.md
@@ -10,4 +10,43 @@
 The [Atlassian Confluence](https://www.atlassian.com/software/confluence) Audit data connector provides the capability to ingest [Confluence Audit Records](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.
 
+## Permissions
+
+**Resource Provider Permissions:**
+- **Workspace** (Workspace): Read and Write permissions are required. 
+ +**Custom Permissions:** +- **Atlassian Confluence API access**: Permission of [Administer Confluence](https://developer.atlassian.com/cloud/confluence/rest/v1/intro/#auth) is required to get access to the Confluence Audit logs API. See [Confluence API documentation](https://developer.atlassian.com/cloud/confluence/rest/v1/api-group-audit/#api-wiki-rest-api-audit-get) to learn more about the audit API. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +To enable the Atlassian Confluence connector for Microsoft Sentinel, click to add an organization, fill the form with the Confluence environment credentials and click to Connect. + Follow [these steps](https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/) to create an API token. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Atlassian Confluence organization URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add organization** + +*Add Atlassian Confluence organization* + +When you click the "Add organization" button in the portal, a configuration form will open. 
You'll need to provide: + +- **Atlassian Confluence organization URL** (optional): .atlassian.net +- **User Name** (optional): User Name (e.g., user@example.com) +- **API Token** (optional): API Token + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/contrastadr.md b/Tools/Solutions Analyzer/connector-docs/connectors/contrastadr.md index 7c6ccaa3a71..c1414305a67 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/contrastadr.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/contrastadr.md @@ -10,4 +10,32 @@ The ContrastADR data connector provides the capability to ingest Contrast ADR attack events into Microsoft Sentinel using the ContrastADR Webhook. ContrastADR data connector can enrich the incoming webhook data with ContrastADR API enrichment calls. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+
+Use this Workspace ID and Primary Key as the shared key in the Azure Function App
+- **Workspace ID**: `WorkspaceId`
+ > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+- **Primary Key**: `PrimaryKey`
+ > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+
+**2. Azure Resource Manager (ARM) Template**
+
+Use this method to automate deployment of the ContrastADR Data Connector using ARM Template.
+
+1. Click the **Deploy to Azure** button below.
+
+	[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ContrastADR-azuredeploy)
+2. Provide the following parameters: Region, Function Name, LOG_ANALYTICS_SHARED_KEY, LOG_ANALYTICS_WORKSPACE_ID
+
 [← Back to Connectors Index](../connectors-index.md)
diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/contrastprotect.md b/Tools/Solutions Analyzer/connector-docs/connectors/contrastprotect.md
index f12e663cdc0..a4775c28cac 100644
--- a/Tools/Solutions Analyzer/connector-docs/connectors/contrastprotect.md
+++ b/Tools/Solutions Analyzer/connector-docs/connectors/contrastprotect.md
@@ -10,4 +10,57 @@
 Contrast Protect mitigates security threats in production applications with runtime protection and observability. Attack event results (blocked, probed, suspicious...) and other information can be sent to Microsoft Microsoft Sentinel to blend with security information from other systems.
 
+## Permissions
+
+**Resource Provider Permissions:**
+- **Workspace** (Workspace): read and write permissions are required.
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Configure the Contrast Protect agent to forward events to syslog as described here: https://docs.contrastsecurity.com/en/output-to-syslog.html. Generate some attack events for your application. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. 
+ +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/contrastprotectama.md b/Tools/Solutions Analyzer/connector-docs/connectors/contrastprotectama.md index 633db853d1a..5733a891697 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/contrastprotectama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/contrastprotectama.md @@ -10,4 +10,61 @@ Contrast Protect mitigates security threats in production applications with runtime protection and observability. Attack event results (blocked, probed, suspicious...) and other information can be sent to Microsoft Microsoft Sentinel to blend with security information from other systems. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Configure the Contrast Protect agent to forward events to syslog as described here: https://docs.contrastsecurity.com/en/output-to-syslog.html. Generate some attack events for your application. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. 
+ +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/corelightconnectorexporter.md b/Tools/Solutions Analyzer/connector-docs/connectors/corelightconnectorexporter.md index 38d9752a992..17b81152b86 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/corelightconnectorexporter.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/corelightconnectorexporter.md @@ -10,4 +10,37 @@ The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution. + +**1. Get the files** + +Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration. + +**2. Replay sample data.** + +Replay sample data to create the needed tables in your Log Analytics workspace. +- **Send sample data (only needed once per Log Analytics workspace)**: `./send_samples.py --workspace-id {0} --workspace-key {1}` + +**3. Install custom exporter.** + +Install the custom exporter or the logstash container. + +**4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent.** + +Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Workspace Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cortexxdrdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/cortexxdrdataconnector.md index ccc7be2a864..a3811e25cfb 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cortexxdrdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cortexxdrdataconnector.md @@ -10,4 +10,32 @@ The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### Configuration steps for the Palo Alto Cortex XDR API + Follow the instructions to obtain the credentials. 
you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key. +#### 1. Retrieve API URL + 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials + 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] + 1.3. Under [**Integrations**] click on [**API Keys**]. + 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner. +#### 2. Retrieve API Token + 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials + 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] + 2.3. Under [**Integrations**] click on [**API Keys**]. + 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner. + 2.5. Choose security level, role, choose Standard and click on [**Generate**] + 2.6. Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column +- **Base API URL**: https://api-example.xdr.au.paloaltonetworks.com +- **API Key ID**: API ID +- **API Token**: (password field) +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cribl.md b/Tools/Solutions Analyzer/connector-docs/connectors/cribl.md index 19443faa086..9b8c23aa27e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cribl.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cribl.md @@ -10,4 +10,20 @@ The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. This gives you more security insight into your organization's data pipelines. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Installation and setup instructions for Cribl Stream for Microsoft Sentinel** + +Use the documentation from this Github repository and configure Cribl Stream using + +https://docs.cribl.io/stream/usecase-azure-workspace/ + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikeapiccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikeapiccpdefinition.md index 260600e295c..d4aced8bd7a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikeapiccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikeapiccpdefinition.md @@ -10,4 +10,25 @@ The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +**1. Configuration steps for the CrowdStrike API** + +Follow the instructions below to obtain your CrowdStrike API credentials. +#### 1. Retrieve API URL +Log in to your CrowdStrike Console and navigate to the API section to copy your Base API URL. +#### 2. Retrieve Client Credentials +Obtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account. +- **Base API URL**: https://api.us-2.crowdstrike.com +- **Client ID**: Your Client ID +- **Client Secret**: (password field) +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalconadversaryintelligence.md b/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalconadversaryintelligence.md index 4dcc15190dd..16c1f9c2cd2 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalconadversaryintelligence.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalconadversaryintelligence.md @@ -10,4 +10,89 @@ The [CrowdStrike](https://www.crowdstrike.com/) Falcon Indicators of Compromise connector retrieves the Indicators of Compromise from the Falcon Intel API and uploads them [Microsoft Sentinel Threat Intel](https://learn.microsoft.com/en-us/azure/sentinel/understand-threat-intelligence). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **CrowdStrike API Client ID and Client Secret**: **CROWDSTRIKE_CLIENT_ID**, **CROWDSTRIKE_CLIENT_SECRET**, **CROWDSTRIKE_BASE_URL**. CrowdStrike credentials must have Indicators (Falcon Intelligence) read scope. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**STEP 1 - [Generate CrowdStrike API credentials](https://www.crowdstrike.com/blog/tech-center/get-access-falcon-apis/).** + +Make sure 'Indicators (Falcon Intelligence)' scope has 'read' selected + +**STEP 2 - [Register an Entra App](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app) with client secret.** + +Provide the Entra App principal with 'Microsoft Sentinel Contributor' role assignment on the respective log analytics workspace. [How to assign roles on Azure](https://learn.microsoft.com/en-us/azure/role-based-access-control/role-assignments-portal). + +**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the CrowdStrike Falcon Indicator of Compromise connector, have the Workspace ID (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**6. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the CrowdStrike Falcon Adversary Intelligence connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdStrikeFalconAdversaryIntelligence-azuredeploy) +2. 
Provide the following parameters: CrowdStrikeClientId, CrowdStrikeClientSecret, CrowdStrikeBaseUrl, WorkspaceId, TenantId, Indicators, AadClientId, AadClientSecret, LookBackDays + +**7. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the CrowdStrike Falcon Adversary Intelligence connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdStrikeFalconAdversaryIntelligence-Functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CrowdStrikeFalconIOCXXXXX). + + e. **Select a runtime:** Choose Python 3.12. + + f. Select a location for new resources. 
For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select ** New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + CROWDSTRIKE_CLIENT_ID + CROWDSTRIKE_CLIENT_SECRET + CROWDSTRIKE_BASE_URL + TENANT_ID + INDICATORS + WorkspaceKey + AAD_CLIENT_ID + AAD_CLIENT_SECRET + LOOK_BACK_DAYS + WORKSPACE_ID +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalconendpointprotection.md b/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalconendpointprotection.md index b90dff2a0ba..cbab844a9d2 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalconendpointprotection.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalconendpointprotection.md @@ -10,4 +10,61 @@ The [CrowdStrike Falcon Endpoint Protection](https://www.crowdstrike.com/endpoint-security-products/) connector allows you to easily connect your CrowdStrike Falcon Event Stream with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization's endpoints and improves your security operation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Crowd Strike Falcon Endpoint Protection and load the function code or click [here](https://aka.ms/sentinel-crowdstrikefalconendpointprotection-parser), on the second line of the query, enter the hostname(s) of your CrowdStrikeFalcon device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. 
+ + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward CrowdStrike Falcon Event Stream logs to a Syslog agent** + +Deploy the CrowdStrike Falcon SIEM Collector to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. +1. [Follow these instructions](https://www.crowdstrike.com/blog/tech-center/integrate-with-your-siem/) to deploy the SIEM Collector and forward syslog +2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalconendpointprotectionama.md b/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalconendpointprotectionama.md index 928725a90a3..8500d92a92a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalconendpointprotectionama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalconendpointprotectionama.md @@ -10,4 +10,61 @@ The [CrowdStrike Falcon Endpoint Protection](https://www.crowdstrike.com/endpoint-security-products/) connector allows you to easily connect your CrowdStrike Falcon Event Stream with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization's endpoints and improves your security operation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Crowd Strike Falcon Endpoint Protection and load the function code or click [here](https://aka.ms/sentinel-crowdstrikefalconendpointprotection-parser), on the second line of the query, enter the hostname(s) of your CrowdStrikeFalcon device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward CrowdStrike Falcon Event Stream logs to a Syslog agent** + + Deploy the CrowdStrike Falcon SIEM Collector to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. +1. [Follow these instructions](https://www.crowdstrike.com/blog/tech-center/integrate-with-your-siem/) to deploy the SIEM Collector and forward syslog +2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalcons3ccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalcons3ccpdefinition.md index 7a86c2749af..3164bbd542b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalcons3ccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikefalcons3ccpdefinition.md @@ -10,4 +10,81 @@ The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### Requirements: + In order to use the Falcon Data Replicator feature the following are required: + 1. **Subscription:** + 1.1. Falcon Data Replicator. + 1.2. Falcon Insight XDR. + 2. **Roles:** + 2.1. Falcon Administrator. +#### 1. Setup your CrowdStrike & AWS environments + To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace. + #### For each template, create Stack in AWS: + 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). + 2. Choose the ‘Specify template’ option, then ‘Upload a template file’ by clicking on ‘Choose file’ and selecting the appropriate CloudFormation template file provided below. click ‘Choose file’ and select the downloaded template. + 3. Click 'Next' and 'Create stack'. +Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
+ | CrowdStrike region | AWS region | + |-----------------|-----------| + | US-1 | us-west-1 | + | US-2 | us-west-2 | + | EU-1 | eu-central-1 +- **Template 1: OpenID connect authentication deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Template 2: AWS CrowdStrike resources deployment**: `CrowdStrike` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### Using your own S3 Bucket + In order to use your own S3 bucket you can reference the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow these steps: + 1. Create support case with the following Name: **Using Self S3 bucket for FDR** + 2. Add the following information: + 2.1. The Falcon CID where your FDR feed is provisioned + 2.2. Indicate which types of events you wish to have provided in this new FDR feed. + 2.3. Indicate which types of events you wish to have provided in this new FDR feed. + 2.4. Do not use any partitions. + | Event type | S3 prefix | + |-----------------|-----------| + | Primary Events | data/ | + | Secondary Events | fdrv2/ +#### 2. Connect new collectors + To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Role ARN** +- **Queue URL** +- **Stream name** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). 
+ +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add new collector** + +*AWS S3 connector* + +When you click the "Add new collector" button in the portal, a configuration form will open. You'll need to provide: + +*Account details* + +- **Role ARN** (required) +- **Queue URL** (required) +- **Data type** (required): Select from available options + - Primary Events + - Secondary Events + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikereplicatorv2.md b/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikereplicatorv2.md index 6180cef52d2..d691440bf86 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikereplicatorv2.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/crowdstrikereplicatorv2.md @@ -10,4 +10,89 @@ This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **SQS and AWS S3 account credentials/permissions**: **AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**1. Prerequisites** + +1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR. + - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. + - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. +2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use an AAD application. + - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. + - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page. + +**2. Deployment Options** + +Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) +2. 
Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources. +3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +4. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy DCE, DCR and Custom Tables for data ingestion** + +1. Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) +2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment). + - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3). + - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2). + +**2. Deploy a Function App** + +1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer. +2. 
Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + +**3. Configure the Function App** + +1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select ** New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + AWS_KEY + AWS_SECRET + AWS_REGION_NAME + QUEUE_URL + USER_SELECTION_REQUIRE_RAW //True if raw data is required + USER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required + MAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium + MAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here + AZURE_TENANT_ID + AZURE_CLIENT_ID + AZURE_CLIENT_SECRET + DCE_INGESTION_ENDPOINT + NORMALIZED_DCR_ID + RAW_DATA_DCR_ID + EVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet + REQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet + Schedule //Add value as '0 */1 * * * *' to ensure the function runs every minute. +5. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ctera.md b/Tools/Solutions Analyzer/connector-docs/connectors/ctera.md index 1dc10609e45..cb83c68a700 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ctera.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ctera.md @@ -16,4 +16,21 @@ The CTERA Data Connector for Microsoft Sentinel offers monitoring and threat det Additionally, it helps you identify critical patterns such as mass access denied events, mass deletions, and mass permission changes, enabling proactive threat management and response. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Step 1: Connect CTERA Platform to Syslog** + +Set up your CTERA portal syslog connection and Edge-Filer Syslog connector + +**2. Step 2: Install Azure Monitor Agent (AMA) on Syslog Server** + +Install the Azure Monitor Agent (AMA) on your syslog server to enable data collection. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cyberark.md b/Tools/Solutions Analyzer/connector-docs/connectors/cyberark.md index 1eeb32f6364..bf047d381d5 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cyberark.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cyberark.md @@ -10,4 +10,59 @@ CyberArk Enterprise Password Vault generates an xml Syslog message for every action taken against the Vault. The EPV will send the xml messages through the Microsoft Sentinel.xsl translator to be converted into CEF standard format and sent to a syslog staging server of your choice (syslog-ng, rsyslog). 
The Log Analytics agent installed on your syslog staging server will import the messages into Microsoft Log Analytics. Refer to the [CyberArk documentation](https://docs.cyberark.com/Product-Doc/OnlineHelp/PAS/Latest/en/Content/PASIMP/DV-Integrating-with-SIEM-Applications.htm) for more guidance on SIEM integrations. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python installed on your machine. + +> 2. You must have elevated permissions (sudo) on your machine. 
+ - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +On the EPV configure the dbparm.ini to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machines IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python installed on your machine using the following command: python -version + +> + +> 2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machines security according to your organizations security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cyberarkama.md b/Tools/Solutions Analyzer/connector-docs/connectors/cyberarkama.md index a705d09418a..f79ca68b775 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cyberarkama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cyberarkama.md @@ -10,4 +10,58 @@ CyberArk Privilege Access Manager generates an xml Syslog message for every action taken against the Vault. 
The PAM will send the xml messages through the Microsoft Sentinel.xsl translator to be converted into CEF standard format and sent to a syslog staging server of your choice (syslog-ng, rsyslog). The Log Analytics agent installed on your syslog staging server will import the messages into Microsoft Log Analytics. Refer to the [CyberArk documentation](https://docs.cyberark.com/privilege-cloud-standard/Latest/en/Content/Privilege%20Cloud/privCloud-connect-siem.htm) for more guidance on SIEM integrations. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. 
Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + On the EPV configure the dbparm.ini to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machines IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machines security according to your organizations security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cyberarkaudit.md b/Tools/Solutions Analyzer/connector-docs/connectors/cyberarkaudit.md index 679a8a2413f..1322ac873e5 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cyberarkaudit.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cyberarkaudit.md @@ -10,4 +10,92 @@ The [CyberArk Audit](https://docs.cyberark.com/Audit/Latest/en/Content/Resources/_TopNav/cc_Home.htm) data connector provides the capability to retrieve security event logs of the CyberArk Audit service and more events into Microsoft Sentinel through the REST API. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Audit REST API Connections details and Credentials**: **OauthUsername**, **OauthPassword**, **WebAppID**, **AuditApiKey**, **IdentityEndpoint** and **AuditApiBaseUrl** are required for making API calls. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**NOTE:** API authorization key(s) or token(s) are securely stored in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
+ +**STEP 1 - Configuration steps for the CyberArk Audit SIEM Integration** + + Follow the [instructions](https://docs.cyberark.com/audit/latest/en/Content/Audit/isp_Microsoft_Sentinel.htm?tocpath=SIEM%20integrations%7C_____3) to obtain connection details and credentials. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the CyberArk Audit data connector, have the Workspace Name and Workspace Location (can be copied from the following). +- **Workspace Name**: `WorkspaceName` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Workspace Location**: `WorkspaceLocation` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the CyberArk Audit data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CyberArkAuditAPI-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **CyberArkAuditUsername**, **CyberArkAuditPassword**, **CyberArkAuditServerURL** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the CyberArk Audit data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. 
Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-CyberArkAudit-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CyberArkXXXXX). + + e. **Select a runtime:** Choose Python 3.10. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select ** New application setting**. +3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): + CyberArkAuditUsername + CyberArkAuditPassword + CyberArkAuditServerURL + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cyberpionsecuritylogs.md b/Tools/Solutions Analyzer/connector-docs/connectors/cyberpionsecuritylogs.md index bb9fcdf2f41..db7876b509c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cyberpionsecuritylogs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cyberpionsecuritylogs.md @@ -10,4 +10,23 @@ The IONIX Security Logs data connector, ingests logs from the IONIX system directly into Sentinel. The connector allows users to visualize their data, create alerts and incidents and improve security investigations. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **IONIX Subscription**: a subscription and account is required for IONIX logs. 
[One can be acquired here.](https://azuremarketplace.microsoft.com/en/marketplace/apps/cyberpion1597832716616.cyberpion) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Follow the [instructions](https://www.ionix.io/integrations/azure-sentinel/) to integrate IONIX Security Alerts into Sentinel. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cybersixgillactionablealerts.md b/Tools/Solutions Analyzer/connector-docs/connectors/cybersixgillactionablealerts.md index b1e88ddd57f..114ee645c6e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cybersixgillactionablealerts.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cybersixgillactionablealerts.md @@ -10,4 +10,84 @@ Actionable alerts provide customized alerts based on configured assets +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Client_ID** and **Client_Secret** are required for making API calls. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Cybersixgill API to pull Alerts into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**1. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Cybersixgill Actionable Alerts data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/senitnel-cybersixgill-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. 
Enter the **Workspace ID**, **Workspace Key**, **Client ID**, **Client Secret**, **TimeInterval** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**2. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Cybersixgill Actionable Alerts data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cybersixgill-Actionable-Alerts/Data%20Connectors/CybersixgillAlerts.zip?raw=true) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CybersixgillAlertsXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. 
For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + ClientID + ClientSecret + Polling + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us` +3. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cyborgsecurity-hunter.md b/Tools/Solutions Analyzer/connector-docs/connectors/cyborgsecurity-hunter.md index 74b250aedaf..094d969b9d1 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cyborgsecurity-hunter.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cyborgsecurity-hunter.md @@ -14,4 +14,37 @@ Cyborg Security is a leading provider of advanced threat hunting solutions, with Follow the steps to gain access to Cyborg Security's Community and setup the 'Open in Tool' capabilities in the HUNTER Platform. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. 
+
+## Setup Instructions
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+ℹ️ Use the following link to find your Azure Tenant ID: How to find your Azure Active Directory tenant ID
+- **ResourceGroupName & WorkspaceName**: `{0}`
+- **WorkspaceID**: `{0}`
+
+**1. Sign up for Cyborg Security's HUNTER Community Account**
+
+Cyborg Security offers Community Members access to a subset of the Emerging Threat Collections and hunt packages.
+
+Create a Free Community Account to get access to Cyborg Security's Hunt Packages: [Sign Up Now!](https://www.cyborgsecurity.com/user-account-creation/)
+
+**2. Configure the Open in Tool Feature**
+
+1. Navigate to the [Environment](https://hunter.cyborgsecurity.io/environment) section of the HUNTER Platform.
+2. Fill in the **Root URI** of your environment in the section labeled **Microsoft Sentinel**. Replace the placeholders with the IDs and Names of your Subscription, Resource Groups and Workspaces.
+
+   https[]()://portal.azure.com#@**AzureTenantID**/blade/Microsoft_OperationsManagementSuite_Workspace/Logs.ReactView/resourceId/%2Fsubscriptions%2F**AzureSubscriptionID**%2Fresourcegroups%2F**ResourceGroupName**%2Fproviders%2Fmicrosoft.operationalinsights%2Fworkspaces%2F<**WorkspaceName**>/
+3. Click **Save**.
+
+**3. Execute a HUNTER hunt package in Microsoft Sentinel**
+
+Identify a Cyborg Security HUNTER hunt package to deploy and use the **Open In Tool** button to quickly open Microsoft Sentinel and stage the hunting content.
+ +![image](https://7924572.fs1.hubspotusercontent-na1.net/hubfs/7924572/HUNTER/Screenshots/openintool-ms-new.png) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cyeradspmccf.md b/Tools/Solutions Analyzer/connector-docs/connectors/cyeradspmccf.md index 05fb7dab47c..678e7364afc 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cyeradspmccf.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cyeradspmccf.md @@ -1,4 +1,4 @@ -# Cyera DSPM Azure Sentinel Data Connector +# Cyera DSPM Microsoft Sentinel Data Connector | | | |----------|-------| @@ -8,6 +8,22 @@ | **Used in Solutions** | [CyeraDSPM](../solutions/cyeradspm.md) | | **Connector Definition Files** | [CyeraDSPMLogs_ConnectorDefinitionCCF.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json) | -The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once recieced can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance. +The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/), which, once received, can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.
+
+## Permissions
+
+**Resource Provider Permissions:**
+- **Workspace** (Workspace): Read and Write permissions are required.
+
+## Setup Instructions
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+**1. Cyera DSPM Authentication**
+
+Connect to your Cyera DSPM tenant via Personal Access Tokens
+- **Cyera Personal Access Token Client ID**: client_id
+- **Cyera Personal Access Token Secret Key**: (password field)
+- Click 'Connect' to establish connection
 [← Back to Connectors Index](../connectors-index.md)
diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cyerafunctionsconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/cyerafunctionsconnector.md
index a075ee5ef23..83ed0a7e532 100644
--- a/Tools/Solutions Analyzer/connector-docs/connectors/cyerafunctionsconnector.md
+++ b/Tools/Solutions Analyzer/connector-docs/connectors/cyerafunctionsconnector.md
@@ -1,4 +1,4 @@
-# Cyera DSPM Azure Functions Sentinel Data Connector
+# Cyera DSPM Azure Functions Microsoft Sentinel Data Connector
 
 | | |
 |----------|-------|
@@ -8,6 +8,62 @@
 | **Used in Solutions** | [CyeraDSPM](../solutions/cyeradspm.md) |
 | **Connector Definition Files** | [FunctionAppDC.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json) |
 
-The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*,
*Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Sentinel. +The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> 
**Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel. + +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Note** + +>**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). Function runtime and data egress may incur charges. See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/). + +**2. Optional Step** + +>**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references). + +**3. STEP 1 — Prepare Cyera API Access** + +1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\n2) Note **API Base URL**, **Client ID**, and **Client Secret**. + +**4. 
STEP 2 — Choose ONE deployment option** + +> Before deploying, have these values handy: +- **Cyera Function Connector Name**: `CyeraDSPMConnector` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Workspace Name**: `{{workspace-location}}` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Workspace Location**: `{{workspace-location}}` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Cyera Base URL**: `https://api.cyera.io` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Cyera Personal Access Token Client ID**: `CyeraClientID` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Cyera Personal Access Token Secret**: `CyeraSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**5. Option 1** + +**Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri) +2. Select the preferred **FunctionName** and **Workspace Name**. +3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. +>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**6. Option 2 — Manual Deployment** + +Follow the [install pack’s step-by-step guide]({{userguide-url}}).\n\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dcr-`, and data collection endpoint with format `sentinel-dce-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\n2) Deploy the Azure Function from the repo's Function folder (Timer-trigger; schedule typically 5–15 minutes).\n3) Configure Function App settings:\n - `CyeraBaseUrl` — Cyera API Base URL\n - `CyeraClientId` — Client ID (PAT)\n - `CyeraSecret` — Client Secret (PAT)\n - `DCR_IMMUTABLE_ID` — DCR immutable ID\n - `DCE_ENDPOINT` — Logs ingestion endpoint URL\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\n4) Save and Start the Function App. 
[← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmaattacksurfacealertsconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmaattacksurfacealertsconnector.md index 94974f1dd2a..9d8ed281781 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmaattacksurfacealertsconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmaattacksurfacealertsconnector.md @@ -8,4 +8,21 @@ | **Used in Solutions** | [Cyfirma Attack Surface](../solutions/cyfirma-attack-surface.md) | | **Connector Definition Files** | [CyfirmaASAlerts_DataConnectorDefinition.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json) | +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. CYFIRMA Attack Surface** + +Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. 
+- **CYFIRMA API URL**: https://decyfir.cyfirma.com +- **CYFIRMA API Key**: (password field) +- **API Delta**: API Delta +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmabrandintelligencealertsdc.md b/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmabrandintelligencealertsdc.md index 61587427729..70826cef7fc 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmabrandintelligencealertsdc.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmabrandintelligencealertsdc.md @@ -8,4 +8,21 @@ | **Used in Solutions** | [Cyfirma Brand Intelligence](../solutions/cyfirma-brand-intelligence.md) | | **Connector Definition Files** | [CyfirmaBIAlerts_DataConnectorDefinition.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json) | +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. CYFIRMA Brand Intelligence** + +Connect to CYFIRMA Brand Intelligence to ingest alerts data into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This enhances performance and efficiency by eliminating the need for query-time parsing. 
+- **CYFIRMA API URL**: https://decyfir.cyfirma.com +- **CYFIRMA API Key**: (password field) +- **API Delta**: API Delta +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmacompromisedaccountsdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmacompromisedaccountsdataconnector.md index 3dfd221795b..9fd45e42bca 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmacompromisedaccountsdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmacompromisedaccountsdataconnector.md @@ -10,4 +10,21 @@ The CYFIRMA Compromised Accounts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR/DeTCT API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. CYFIRMA Compromised Accounts** + +The CYFIRMA Compromised Accounts Data Connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR/DeTCT API to retrieve logs. 
Additionally, it supports DCR-based ingestion time transformations, which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. +- **CYFIRMA API URL**: https://decyfir.cyfirma.com +- **CYFIRMA API Key**: (password field) +- **API Delta**: API Delta +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmacyberintelligencedc.md b/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmacyberintelligencedc.md index 7195553afa5..b42c3782243 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmacyberintelligencedc.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmacyberintelligencedc.md @@ -10,4 +10,24 @@ The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. CYFIRMA Cyber Intelligence** + +This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. 
The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. +- **CYFIRMA API URL**: https://decyfir.cyfirma.com +- **CYFIRMA API Key**: (password field) +- **Pull all IoC's Or Tailored IoC's**: All IoC's or Tailored IoC's +- **API Delta**: API Delta +- **Recommended Actions**: Recommended Action can be any one of:All/Monitor/Block +- **Threat Actor Associated**: Is any Threat Actor Associated with the IoC's +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmadigitalriskalertsconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmadigitalriskalertsconnector.md index 0247311faa3..77c947c0f12 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmadigitalriskalertsconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmadigitalriskalertsconnector.md @@ -10,4 +10,21 @@ The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. CYFIRMA Digital Risk** + +Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing. +- **CYFIRMA API URL**: https://decyfir.cyfirma.com +- **CYFIRMA API Key**: (password field) +- **API Delta**: API Delta +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmavulnerabilitiesinteldc.md b/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmavulnerabilitiesinteldc.md index e676d62d3f6..12221e56524 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmavulnerabilitiesinteldc.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cyfirmavulnerabilitiesinteldc.md @@ -10,4 +10,24 @@ The CYFIRMA Vulnerabilities Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the CYFIRMA API's to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
CYFIRMA Vulnerabilities Intelligence** + +This connector provides the Vulnerabilities logs from CYFIRMA Vulnerabilities Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. +- **CYFIRMA API URL**: https://decyfir.cyfirma.com +- **CYFIRMA API Key**: (password field) +- **API Delta**: API Delta +- **Vendor-Associated Vulnerabilities** +- **Product-Associated Vulnerabilities** +- **Product with Version-Associated Vulnerabilities** +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/cyneriosecurityevents.md b/Tools/Solutions Analyzer/connector-docs/connectors/cyneriosecurityevents.md index 9f7d47ed4e4..ad5d83d864d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/cyneriosecurityevents.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/cyneriosecurityevents.md @@ -10,4 +10,30 @@ The [Cynerio](https://www.cynerio.com/) connector allows you to easily connect your Cynerio Security Events with Microsoft Sentinel, to view IDS Events. This gives you more insight into your organization network security posture and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure and connect Cynerio** + +Cynerio can integrate with and export events directly to Microsoft Sentinel via Azure Server. Follow these steps to establish integration: + +1. In the Cynerio console, go to Settings > Integrations tab (default), and click on the **+Add Integration** button at the top right. + +2. Scroll down to the **SIEM** section. + +3. On the Microsoft Sentinel card, click the Connect button. + +4. The Integration Details window opens. Use the parameters below to fill out the form and set up the connection. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/darktrace.md b/Tools/Solutions Analyzer/connector-docs/connectors/darktrace.md index 2441b6d1441..bc25ee29b2c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/darktrace.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/darktrace.md @@ -10,4 +10,69 @@ The Darktrace connector lets users connect Darktrace Model Breaches in real-time with Microsoft Sentinel, allowing creation of custom Dashboards, Workbooks, Notebooks and Custom Alerts to improve investigation. Microsoft Sentinel's enhanced visibility into Darktrace logs enables monitoring and mitigation of security threats. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Configure Darktrace to forward Syslog messages in CEF format to your Azure workspace via the Syslog agent. + + 1) Within the Darktrace Threat Visualizer, navigate to the System Config page in the main menu under Admin. 
+ + 2) From the left-hand menu, select Modules and choose Microsoft Sentinel from the available Workflow Integrations.\n 3) A configuration window will open. Locate Microsoft Sentinel Syslog CEF and click New to reveal the configuration settings, unless already exposed. + + 4) In the Server configuration field, enter the location of the log forwarder and optionally modify the communication port. Ensure that the port selected is set to 514 and is allowed by any intermediary firewalls. + + 5) Configure any alert thresholds, time offsets or additional settings as required. + + 6) Review any additional configuration options you may wish to enable that alter the Syslog syntax. + + 7) Enable Send Alerts and save your changes. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/darktraceama.md b/Tools/Solutions Analyzer/connector-docs/connectors/darktraceama.md index 6fd24eb70fd..b82121e02a1 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/darktraceama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/darktraceama.md @@ -10,4 +10,70 @@ The Darktrace connector lets users connect Darktrace Model Breaches in real-time with Microsoft Sentinel, allowing creation of custom Dashboards, Workbooks, Notebooks and Custom Alerts to improve investigation. Microsoft Sentinel's enhanced visibility into Darktrace logs enables monitoring and mitigation of security threats. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Configure Darktrace to forward Syslog messages in CEF format to your Azure workspace via the Syslog agent. + + 1) Within the Darktrace Threat Visualizer, navigate to the System Config page in the main menu under Admin. + + 2) From the left-hand menu, select Modules and choose Microsoft Sentinel from the available Workflow Integrations.\n 3) A configuration window will open. Locate Microsoft Sentinel Syslog CEF and click New to reveal the configuration settings, unless already exposed. + + 4) In the Server configuration field, enter the location of the log forwarder and optionally modify the communication port. Ensure that the port selected is set to 514 and is allowed by any intermediary firewalls. + + 5) Configure any alert thresholds, time offsets or additional settings as required. + + 6) Review any additional configuration options you may wish to enable that alter the Syslog syntax. + + 7) Enable Send Alerts and save your changes. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. 
You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/darktracerestconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/darktracerestconnector.md index 491d3f97c4b..5c6e84199c2 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/darktracerestconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/darktracerestconnector.md @@ -10,4 +10,37 @@ The Darktrace REST API connector pushes real-time events from Darktrace to Microsoft Sentinel and is designed to be used with the Darktrace Solution for Sentinel. The connector writes logs to a custom log table titled "darktrace_model_alerts_CL"; Model Breaches, AI Analyst Incidents, System Alerts and Email Alerts can be ingested - additional filters can be set up on the Darktrace System Configuration page. Data is pushed to Sentinel from Darktrace masters. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Darktrace Prerequisites**: To use this Data Connector a Darktrace master running v5.2+ is required. 
+ Data is sent to the [Azure Monitor HTTP Data Collector API](https://docs.microsoft.com/azure/azure-monitor/logs/data-collector-api) over HTTPs from Darktrace masters, therefore outbound connectivity from the Darktrace master to Microsoft Sentinel REST API is required. +- **Filter Darktrace Data**: During configuration it is possible to set up additional filtering on the Darktrace System Configuration page to constrain the amount or types of data sent. +- **Try the Darktrace Sentinel Solution**: You can get the most out of this connector by installing the Darktrace Solution for Microsoft Sentinel. This will provide workbooks to visualise alert data and analytics rules to automatically create alerts and incidents from Darktrace Model Breaches and AI Analyst incidents. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +1. Detailed setup instructions can be found on the Darktrace Customer Portal: https://customerportal.darktrace.com/product-guides/main/microsoft-sentinel-introduction + 2. Take note of the Workspace ID and the Primary key. You will need to enter these details on your Darktrace System Configuration page. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. Darktrace Configuration** + +1. Perform the following steps on the Darktrace System Configuration page: + 2. Navigate to the System Configuration Page (Main Menu > Admin > System Config) + 3. Go into Modules configuration and click on the "Microsoft Sentinel" configuration card + 4. Select "HTTPS (JSON)" and hit "New" + 5. 
Fill in the required details and select appropriate filters + 6. Click "Verify Alert Settings" to attempt authentication and send out a test alert + 7. Run a "Look for Test Alerts" sample query to validate that the test alert has been received + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/datalake2sentinelconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/datalake2sentinelconnector.md index b51c279c87d..477e6b3bec4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/datalake2sentinelconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/datalake2sentinelconnector.md @@ -10,4 +10,19 @@ This solution installs the Datalake2Sentinel connector which is built using the Codeless Connector Platform and allows you to automatically ingest threat intelligence indicators from **Datalake Orange Cyberdefense's CTI platform** into Microsoft Sentinel via the Upload Indicators REST API. After installing the solution, configure and enable this data connector by following guidance in Manage solution view. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Installation and setup instructions** + +Use the documentation from this Github repository to install and configure the Datalake to Microsoft Sentinel connector. 
+ +https://github.com/cert-orangecyberdefense/datalake2sentinel + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/dataminrpulsealerts.md b/Tools/Solutions Analyzer/connector-docs/connectors/dataminrpulsealerts.md index e4a75b7cd19..e32bb762974 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/dataminrpulsealerts.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/dataminrpulsealerts.md @@ -10,4 +10,238 @@ Dataminr Pulse Alerts Data Connector brings our AI-powered real-time intelligence into Microsoft Sentinel for faster threat detection and response. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Required Dataminr Credentials/permissions**: + +a. Users must have a valid Dataminr Pulse API **client ID** and **secret** to use this data connector. + + b. One or more Dataminr Pulse Watchlists must be configured in the Dataminr Pulse website. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This connector uses Azure Functions to connect to the DataminrPulse in which logs are pushed via Dataminr RTAP and it will ingest logs into Microsoft Sentinel. Furthermore, the connector will fetch the ingested data from the custom logs table and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1- Credentials for the Dataminr Pulse Client ID and Client Secret** + + * Obtain Dataminr Pulse user ID/password and API client ID/secret from your Dataminr Customer Success Manager (CSM). + +**STEP 2- Configure Watchlists in Dataminr Pulse portal.** + + Follow the steps in this section to configure watchlists in portal: + + 1. **Login** to the Dataminr Pulse [website](https://app.dataminr.com). + + 2. Click on the settings gear icon, and select **Manage Lists**. + + 3. Select the type of Watchlist you want to create (Cyber, Topic, Company, etc.) and click the **New List** button. + + 4. Provide a **name** for your new Watchlist, and select a highlight color for it, or keep the default color. + + 5. When you are done configuring the Watchlist, click **Save** to save it. + +**STEP 3 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. 
Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of DataminrPulse Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 4 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of DataminrPulse Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of DataminrPulse Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID** + + Follow the steps in this section to assign the role: + 1. In the Azure portal, Go to **Resource Group** and select your resource group. + 2. 
Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. Select **Contributor** as role and click on next. + 5. In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Dataminr Pulse Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**7. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the DataminrPulse connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-DataminrPulseAlerts-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-DataminrPulseAlerts-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + + a. **Function Name** + + b. **Location**: The location in which the data collection rules and data collection endpoints should be deployed. + + c. **Workspace**: Enter Workspace ID of log analytics Workspace ID + + d. 
**Workspace Key**: Enter Primary Key of log analytics Workspace + + e. **DataminrBaseURL**: Enter Base URL starting with "https://" followed by hostname (Example: https://gateway.dataminr.com/) + + f. **ClientId**: Enter your Dataminr account Client ID + + g. **ClientSecret**: Enter your Dataminr account Client Secret + + h. **AzureEntraObjectID**: Enter Object id of your Microsoft Entra App + + i. **AlertsTableName**: Enter name of the table used to store Dataminr Alerts logs. Default is 'DataminrPulse_Alerts' + + j. **AzureClientId**: Enter Azure Client ID that you have created during app registration + + k. **AzureClientSecret**: Enter Azure Client Secret that you have created during creating the client secret + + l. **AzureTenantId**: Enter Azure Tenant ID of your Azure Active Directory + + m. **AzureResourceGroupName**: Enter Azure Resource Group Name in which you want deploy the data connector + + n. **AzureWorkspaceName**: Enter Microsoft Sentinel Workspace Name of Log Analytics workspace + + o. **AzureSubscriptionId**: Enter Azure Subscription Id which is present in the subscription tab in Microsoft Sentinel + + p. **LogLevel**: Add log level or log severity value. Default is 'INFO' + + q. **Schedule**: Enter a valid Quartz Cron-Expression (Example: 0 0 0 * * *) +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**8. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Dataminr Pulse Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**9. 1) Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-DataminrPulseAlerts-functionapp) file. 
Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. DmPulseXXXXX). + + e. **Select a runtime:** Choose Python 3.8 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**10. 2) Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective values (case-sensitive): + + a. **Function Name** + + b. **Location**: The location in which the data collection rules and data collection endpoints should be deployed. + + c. **Workspace**: Enter Workspace ID of log analytics Workspace ID + + d. 
**Workspace Key**: Enter Primary Key of log analytics Workspace + + e. **DataminrBaseURL**: Enter Base URL starting with "https://" followed by hostname (Example: https://gateway.dataminr.com/) + + f. **ClientId**: Enter your Dataminr account Client ID + + g. **ClientSecret**: Enter your Dataminr account Client Secret + + h. **AzureEntraObjectID**: Enter Object id of your Microsoft Entra App + + i. **AlertsTableName**: Enter name of the table used to store Dataminr Alerts logs. Default is 'DataminrPulse_Alerts' + + j. **AzureClientId**: Enter Azure Client ID that you have created during app registration + + k. **AzureClientSecret**: Enter Azure Client Secret that you have created during creating the client secret + + l. **AzureTenantId**: Enter Azure Tenant ID of your Azure Active Directory + + m. **AzureResourceGroupName**: Enter Azure Resource Group Name in which you want deploy the data connector + + n. **AzureWorkspaceName**: Enter Microsoft Sentinel Workspace Name of Log Analytics workspace + + o. **AzureSubscriptionId**: Enter Azure Subscription Id which is present in the subscription tab in Microsoft Sentinel + + p. **LogLevel**: Add log level or log severity value. Default is 'INFO' + + q. **Schedule**: Enter a valid Quartz Cron-Expression (Example: 0 0 0 * * *) + + r. **logAnalyticsUri** (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + +**STEP 7 - Post Deployment steps** + +**12. 1) Get the Function app endpoint** + +1. Go to Azure function Overview page and Click on **"Functions"** in the left blade. +2. Click on the function called **"DataminrPulseAlertsHttpStarter"**. +3. Go to **"GetFunctionurl"** and copy the function url. +4. 
Replace **{functionname}** with **"DataminrPulseAlertsSentinelOrchestrator"** in copied function url.
+
+**13. 2) To add integration settings in Dataminr RTAP using the function URL**
+
+1. Open any API request tool like Postman.
+2. Click on '+' to create a new request.
+3. Select HTTP request method as **'POST'**.
+4. Enter the URL prepared in **point 1)**, in the request URL part.
+5. In Body, select raw JSON and provide request body as below (case-sensitive):
+    {
+    "integration-settings": "ADD",
+    "url": "`(URL part from copied Function-url)`",
+    "token": "`(value of code parameter from copied Function-url)`"
+    }
+6. After providing all required details, click **Send**.
+7. You will receive an integration setting ID in the HTTP response with a status code of 200.
+8. Save the **Integration ID** for future reference.
+
+*Now we are done with adding the integration settings for Dataminr RTAP. Once Dataminr RTAP sends alert data, the Function app is triggered and you should be able to see the alert data from Dataminr Pulse in the Log Analytics workspace table called "DataminrPulse_Alerts_CL".*
+
 [← Back to Connectors Index](../connectors-index.md)
diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ddos.md b/Tools/Solutions Analyzer/connector-docs/connectors/ddos.md
index 59a94f8be15..5bf96d37d5b 100644
--- a/Tools/Solutions Analyzer/connector-docs/connectors/ddos.md
+++ b/Tools/Solutions Analyzer/connector-docs/connectors/ddos.md
@@ -10,4 +10,34 @@

Connect to Azure DDoS Protection Standard logs via Public IP Address Diagnostic Logs. In addition to the core DDoS protection in the platform, Azure DDoS Protection Standard provides advanced DDoS mitigation capabilities against network attacks. It's automatically tuned to protect your specific Azure resources. Protection is simple to enable during the creation of new virtual networks. It can also be done after creation and requires no application or resource changes.
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219760&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **Azure DDoS protection plan**: A configured Azure DDoS Standard protection plan [read more about Azure DDoS protection plans](https://docs.microsoft.com/azure/virtual-network/manage-ddos-protection#create-a-ddos-protection-plan). +- **Enabled Azure DDoS for virtual network**: A configured virtual network with Azure DDoS Standard enabled [read more about configuring virtual network with Azure DDoS](https://docs.microsoft.com/azure/virtual-network/manage-ddos-protection#enable-ddos-for-an-existing-virtual-network). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Azure DDoS Protection to Microsoft Sentinel** + +Enable Diagnostic Logs on All Public IP Addresses. +- **Open Azure Monitoring** + +**2. Inside your Diagnostics settings portal, select your Public IP Address resource:** + +Inside your Public IP Address resource: + +1. Select **+ Add diagnostic setting.​** +2. In the **Diagnostic setting** blade: + - Type a **Name**, within the **Diagnostics settings** name field. + - Select **Send to Log Analytics**. + - Choose the log destination workspace. + - Select the categories that you want to analyze (recommended: DDoSProtectionNotifications, DDoSMitigationFlowLogs, DDoSMitigationReports) + - Click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/delineasecretserver-cef.md b/Tools/Solutions Analyzer/connector-docs/connectors/delineasecretserver-cef.md index 27faaac8b59..382948ada38 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/delineasecretserver-cef.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/delineasecretserver-cef.md @@ -10,4 +10,62 @@ Common Event Format (CEF) from Delinea Secret Server +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Delinea Secret Server**: must be configured to export logs via Syslog + + [Learn more about configure Secret Server](https://thy.center/ss/link/syslog) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. 
+ + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/delineasecretserverama.md b/Tools/Solutions Analyzer/connector-docs/connectors/delineasecretserverama.md index f800f41f8d4..ec4e9b80c3e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/delineasecretserverama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/delineasecretserverama.md @@ -10,4 +10,58 @@ Common Event Format (CEF) from Delinea Secret Server +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. 
Check if there is no existing DCR configured to collect the required facility of logs; if there is none, create a new DCR (Data Collection Rule)
+
+    _Note:- It is recommended to install at least version 1.27 of the AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_
+
+4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine
+
+    **Step B. Forward Common Event Format (CEF) logs to Syslog agent**
+
+    Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address.
+
+    **Step C. Validate connection**
+
+    Follow the instructions to validate your connectivity:
+
+Open Log Analytics to check if the logs are received using the CommonSecurityLog schema.
+
+It may take about 20 minutes until the connection streams data to your workspace.
+
+If the logs are not received, run the following connectivity validation script:
+
+    1. Make sure that you have Python on your machine using the following command: python --version
+
+2. You must have elevated permissions (sudo) on your machine
+    - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef`
+
+**2.
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/derdacksignl4.md b/Tools/Solutions Analyzer/connector-docs/connectors/derdacksignl4.md index b98935a1efd..0a906bd7998 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/derdacksignl4.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/derdacksignl4.md @@ -14,4 +14,54 @@ When critical systems fail or security incidents happen, SIGNL4 bridges the ‘l [Learn more >](https://www.signl4.com) +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector is mainly configured on the SIGNL4 side. You can find a description video here: [**Integrate SIGNL4 with Microsoft Sentinel**](https://www.signl4.com/blog/portfolio_item/azure-sentinel-mobile-alert-notification-duty-schedule-escalation/). + +>**SIGNL4 Connector:** The SIGNL4 connector for Microsoft Sentinel, Azure Security Center and other Azure Graph Security API providers provides seamless 2-way integration with your Azure Security solutions. Once added to your SIGNL4 team, the connector will read security alerts from Azure Graph Security API and fully automatically and trigger alert notifications to your team members on duty. 
It will also synchronize the alert status from SIGNL4 to Graph Security API, so that if alerts are acknowledged or closed, this status is also updated on the corresponding Azure Graph Security API alert or the corresponding security provider. As mentioned, the connector mainly uses Azure Graph Security API, but for some security providers, such as Microsoft Sentinel, it also uses dedicated REST APIs of the corresponding Azure solutions.
+
+**1. Microsoft Sentinel Features**
+
+Microsoft Sentinel is a cloud native SIEM solution from Microsoft and a security alert provider in Azure Graph Security API. However, the level of alert details available with the Graph Security API is limited for Microsoft Sentinel. The connector can therefore augment alerts with further details (insights rule search results) from the underlying Microsoft Sentinel Log Analytics workspace. To be able to do that, the connector communicates with the Azure Log Analytics REST API and needs the corresponding permissions (see below). Furthermore, the app can also update the status of Microsoft Sentinel incidents, when all related security alerts are e.g. in progress or resolved. In order to be able to do that, the connector needs to be a member of the 'Microsoft Sentinel Contributors' group in your Azure Subscription.
+    **Automated deployment in Azure**
+    The credentials required to access the aforementioned APIs are generated by a small PowerShell script that you can download below. The script performs the following tasks for you:
+    - Logs you on to your Azure Subscription (please log in with an administrator account)
+    - Creates a new enterprise application for this connector in your Azure AD, also referred to as a service principal
+    - Creates a new role in your Azure IAM that grants read/query permission to only Azure Log Analytics workspaces.
+ - Joins the enterprise application to that user role + - Joins the enterprise application to the 'Microsoft Sentinel Contributors' role + - Outputs some data that you need to configure app (see below) + +**2. Deployment procedure** + +1. Download the PowerShell deployment script from [here](https://github.com/signl4/signl4-integration-azuresentinel/blob/master/registerSIGNL4Client.ps1). +2. Review the script and the roles and permission scopes it deploys for the new app registration. If you don't want to use the connector with Microsoft Sentinel, you could remove all role creation and role assignment code and only use it to create the app registration (SPN) in your Azure Active Directory. +3. Run the script. At the end it outputs information that you need to enter in the connector app configuration. +4. In Azure AD, click on 'App Registrations'. Find the app with the name 'SIGNL4AzureSecurity' and open its details +5. On the left menu blade click 'API Permissions'. Then click 'Add a permission'. +6. On the blade that loads, under 'Microsoft APIs' click on the 'Microsoft Graph' tile, then click 'App permission'. +7. In the table that is displayed expand 'SecurityEvents' and check 'SecurityEvents.Read.All' and 'SecurityEvents.ReadWrite.All'. +8. Click 'Add permissions'. + +**3. Configuring the SIGNL4 connector app** + +Finally, enter the IDs, that the script has outputted in the connector configuration: + - Azure Tenant ID + - Azure Subscription ID + - Client ID (of the enterprise application) + - Client Secret (of the enterprise application) + Once the app is enabled, it will start reading your Azure Graph Security API alerts. + +>**NOTE:** It will initially only read the alerts that have occurred within the last 24 hours. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/digitalguardiandlp.md b/Tools/Solutions Analyzer/connector-docs/connectors/digitalguardiandlp.md index 360ff5c6be7..1380e88c193 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/digitalguardiandlp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/digitalguardiandlp.md @@ -10,4 +10,87 @@ [Digital Guardian Data Loss Prevention (DLP)](https://digitalguardian.com/platform-overview) data connector provides the capability to ingest Digital Guardian DLP logs into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**DigitalGuardianDLPEvent**](https://aka.ms/sentinel-DigitalGuardianDLP-parser) which is deployed with the Microsoft Sentinel Solution. + +**1. Configure Digital Guardian to forward logs via Syslog to remote server where you will install the agent.** + +Follow these steps to configure Digital Guardian to forward logs via Syslog: + +1.1. Log in to the Digital Guardian Management Console. + +1.2. Select **Workspace** > **Data Export** > **Create Export**. + +1.3. 
From the **Data Sources** list, select **Alerts** or **Events** as the data source.
+
+1.4. From the **Export type** list, select **Syslog**.
+
+1.5. From the **Type list**, select **UDP** or **TCP** as the transport protocol.
+
+1.6. In the **Server** field, type the IP address of your Remote Syslog server.
+
+1.7. In the **Port** field, type 514 (or other port if your Syslog server was configured to use non-default port).
+
+1.8. From the **Severity Level** list, select a severity level.
+
+1.9. Select the **Is Active** check box.
+
+1.10. Click **Next**.
+
+1.11. From the list of available fields, add Alert or Event fields for your data export.
+
+1.12. Select a Criteria for the fields in your data export and click **Next**.
+
+1.13. Select a group for the criteria and click **Next**.
+
+1.14. Click **Test Query**.
+
+1.15. Click **Next**.
+
+1.16. Save the data export.
+
+**2. Install and onboard the agent for Linux or Windows**
+
+Install the agent on the Server to which the logs will be forwarded.
+
+> Logs on Linux or Windows servers are collected by **Linux** or **Windows** agents.
+**Choose where to install the Linux agent:**
+
+**Install agent on Azure Linux Virtual Machine**
+
+    Select the machine to install the agent on and then click **Connect**.
+    - **Install agent on Linux Virtual Machine**
+
+    **Install agent on a non-Azure Linux Machine**
+
+    Download the agent on the relevant machine and follow the instructions.
+    - **Install agent on Linux (Non-Azure)**
+
+**Choose where to install the Windows agent:**
+
+**Install agent on Azure Windows Virtual Machine**
+
+    Select the machine to install the agent on and then click **Connect**.
+    - **Install/configure: InstallAgentOnVirtualMachine**
+
+    **Install agent on a non-Azure Windows Machine**
+
+    Download the agent on the relevant machine and follow the instructions.
+    - **Install/configure: InstallAgentOnNonAzure**
+
+**3.
Check logs in Microsoft Sentinel** + +Open Log Analytics to check if the logs are received using the Syslog schema. + +>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/digitalshadowssearchlightazurefunctions.md b/Tools/Solutions Analyzer/connector-docs/connectors/digitalshadowssearchlightazurefunctions.md index bd6d148da52..a9c4ffdbdf7 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/digitalshadowssearchlightazurefunctions.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/digitalshadowssearchlightazurefunctions.md @@ -10,4 +10,90 @@ The Digital Shadows data connector provides ingestion of the incidents and alerts from Digital Shadows Searchlight into the Microsoft Sentinel using the REST API. The connector will provide the incidents and alerts information such that it helps to examine, diagnose and analyse the potential security risks and threats. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Digital Shadows account ID, secret and key** is required. See the documentation to learn more about API on the `https://portal-digitalshadows.com/learn/searchlight-api/overview/description`. 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a 'Digital Shadows Searchlight' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the 'Digital Shadows Searchlight' API** + +The provider should provide or link to detailed steps to configure the 'Digital Shadows Searchlight' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the 'Digital Shadows Searchlight' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'Digital Shadows Searchlight' API authorization key(s) or Token, readily available. 
+ +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the 'Digital Shadows Searchlight' connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Digitalshadows-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, 'and/or Other required fields'. +>Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the 'Digital Shadows Searchlight' connector manually with Azure Functions. + +**1. Create a Function App** + +1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp). +2. Click **+ Create** at the top. +3. In the **Basics** tab, ensure Runtime stack is set to **python 3.11**. +4. In the **Hosting** tab, ensure **Plan type** is set to **'Consumption (Serverless)'**. +5. Select Storage account +6. 'Add other required configurations'. +7. 'Make other preferable configuration changes', if needed, then click **Create**. + +**2. 
Import Function App Code (Zip deployment)** + +1. Install Azure CLI +2. From terminal type **az functionapp deployment source config-zip -g {ResourceGroup} -n {FunctionApp} --src {Zip File}** and hit enter. Set the `ResourceGroup` value to: your resource group name. Set the `FunctionApp` value to: your newly created function app name. Set the `Zip File` value to: `digitalshadowsConnector.zip` (path to your zip file). Note:- Download the zip file from the link - [Function App Code](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Shadows/Data%20Connectors/Digital%20Shadows/digitalshadowsConnector.zip) + +**3. Configure the Function App** + +1. In the Function App screen, click the Function App name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following 'x (number of)' application settings individually, under Name, with their respective string values (case-sensitive) under Value: + DigitalShadowsAccountID + WorkspaceID + WorkspaceKey + DigitalShadowsKey + DigitalShadowsSecret + HistoricalDays + DigitalShadowsURL + ClassificationFilterOperation + HighVariabilityClassifications + FUNCTION_NAME + logAnalyticsUri (optional) +(add any other settings required by the Function App) +Set the `DigitalShadowsURL` value to: `https://api.searchlight.app/v1` +Set the `HighVariabilityClassifications` value to: `exposed-credential,marked-document` +Set the `ClassificationFilterOperation` value to: `exclude` for exclude function app or `include` for include function app +>Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/dns.md b/Tools/Solutions Analyzer/connector-docs/connectors/dns.md index 44ed4dceceb..82ddbed580c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/dns.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/dns.md @@ -28,4 +28,32 @@ The DNS log connector allows you to easily connect your DNS analytic and audit l For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220127&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Solutions** (ResourceGroup): [read and write permissions](https://docs.microsoft.com/azure/role-based-access-control/built-in-roles#log-analytics-contributor). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Download and install the agent** + +> DNS logs are collected only from **Windows** agents. +**Choose where to install the agent:** + +**Install agent on Azure Windows Virtual Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on non-Azure Windows Machine** + + Select the machine to install the agent and then click **Connect**. + - **Install/configure: InstallAgentOnNonAzure** + +**2. 
Install DNS solution** +- Install solution: DnsAnalytics + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/doppel-dataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/doppel-dataconnector.md index b6c462e7d90..3d0bce272af 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/doppel-dataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/doppel-dataconnector.md @@ -10,4 +10,112 @@ The data connector is built on Microsoft Sentinel for Doppel events and alerts and supports DCR-based [ingestion time transformations](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/ingestion-time-transformations) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. + +**Custom Permissions:** +- **Microsoft Entra Tenant ID, Client ID and Client Secret**: Microsoft Entra ID requires a Client ID and Client Secret to authenticate your application. Additionally, Global Admin/Owner level access is required to assign the Entra-registered application a Resource Group Monitoring Metrics Publisher role. +- **Requires Workspace ID, DCE-URI, DCR-ID**: You will need to get the Log Analytics Workspace ID, DCE Logs Ingestion URI and DCR Immutable ID for the configuration. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure Doppel Webhook** + +Configure the Webhook in Doppel and Endpoint with permissions in Microsoft Sentinel to send data. 
+**Register the Application in Microsoft Entra ID** + + 1. **Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**: + - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab. + - Ensure you are logged in with an account that has **Admin level** permissions. + +2. **Create a New Application**: + - In the **Microsoft Entra ID portal**, select **App registrations** mentioned on the left-hand side tab. + - Click on **+ New registration**. + - Fill out the following fields: + - **Name**: Enter a name for the app (e.g., “Doppel App”). + - **Supported account types**: Choose **Accounts in this organizational directory only** (Default Directory only - Single tenant). + - **Redirect URI**: Leave this blank unless required otherwise. + - Click **Register** to create the application. + +3. **Copy Application and Tenant IDs**: + - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You’ll need these for the integration. + +4. **Create a Client Secret**: + - In the **Certificates & secrets** section, click **+ New client secret**. + - Add a description (e.g., 'Doppel Secret') and set an expiration (e.g., 1 year). + - Click **Add**. + - **Copy the client secret value immediately**, as it will not be shown again. + + **Assign the "Monitoring Metrics Publisher" Role to the App** + + 1. **Open the Resource Group in Azure Portal**: + - Navigate to the **Resource Group** that contains the **Log Analytics Workspace** and **Data Collection Rules (DCRs)** where you want the app to push data. + +2. **Assign the Role**: + - In the **Resource Group** menu, click on **Access control (IAM)** mentioned on the left-hand side tab .. + - Click on **+ Add** and select **Add role assignment**. + - In the **Role** dropdown, search for and select the **Monitoring Metrics Publisher** role. + - Under **Assign access to**, choose **Azure AD user, group, or service principal**. 
+ - In the **Select** field, search for your registered app by **name** or **client ID**. + - Click **Save** to assign the role to the application. + + **Deploy the ARM Template** + + 1. **Retrieve the Workspace ID**: + - After assigning the role, you will need the **Workspace ID**. + - Navigate to the **Log Analytics Workspace** within the **Resource Group**. + - In the **Overview** section, locate the **Workspace ID** field under **Workspace details**. + - **Copy the Workspace ID** and keep it handy for the next steps. + +2. **Click the Deploy to Azure Button**: + - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fmetron-labs%2FAzure-Sentinel%2Frefs%2Fheads%2FDoppelSolution%2FSolutions%2FDoppel%2FData%2520Connectors%2FDeployToAzure.json). + - This will take you directly to the Azure portal to start the deployment. + +3. **Review and Customize Parameters**: + - On the custom deployment page, ensure you’re deploying to the correct **subscription** and **resource group**. + - Fill in the parameters like **workspace name**, **workspace ID**, and **workspace location**. + +4. **Click Review + Create** and then **Create** to deploy the resources. + + **Verify DCE, DCR, and Log Analytics Table Setup** + + 1. **Check the Data Collection Endpoint (DCE)**: + - After deploying, go to **Azure Portal > Data Collection Endpoints**. + - Verify that the **DoppelDCE** endpoint has been created successfully. + - **Copy the DCE Logs Ingestion URI**, as you’ll need this for generating the webhook URL. + +2. **Confirm Data Collection Rule (DCR) Setup**: + - Go to **Azure Portal > Data Collection Rules**. + - Ensure the **DoppelDCR** rule is present. + - **Copy the Immutable ID** of the DCR from the Overview page, as you’ll need it for the webhook URL. + +3. **Validate Log Analytics Table**: + - Navigate to your **Log Analytics Workspace** (linked to Microsoft Sentinel). 
+ - Under the **Tables** section, verify that the **DoppelTable_CL** table has been created successfully and is ready to receive data. + + **Integrate Doppel Alerts with Microsoft Sentinel** + + 1. **Gather Necessary Information**: + - Collect the following details required for integration: + - **Data Collection Endpoint ID (DCE-ID)** + - **Data Collection Rule ID (DCR-ID)** + - **Microsoft Entra Credentials**: Tenant ID, Client ID, and Client Secret. + +2. **Coordinate with Doppel Support**: + - Share the collected DCE-ID, DCR-ID, and Microsoft Entra credentials with Doppel support. + - Request assistance to configure these details in the Doppel tenant to enable webhook setup. + +3. **Webhook Setup by Doppel**: + - Doppel will use the provided Resource IDs and credentials to configure a webhook. + - This webhook will facilitate the forwarding of alerts from Doppel to Microsoft Sentinel. + +4. **Verify Alert Delivery in Microsoft Sentinel**: + - Check that alerts from Doppel are successfully forwarded to Microsoft Sentinel. + - Validate that the **Workbook** in Microsoft Sentinel is updated with the alert statistics, ensuring seamless data integration. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/dragossitestoreccp.md b/Tools/Solutions Analyzer/connector-docs/connectors/dragossitestoreccp.md index 166a58f2225..c159e9b1b0c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/dragossitestoreccp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/dragossitestoreccp.md @@ -10,4 +10,24 @@ The [Dragos Platform](https://www.dragos.com/) is the leading Industrial Cyber Security platform it offers a comprehensive Operational Technology (OT) cyber threat detection built by unrivaled industrial cybersecurity expertise. 
This solution enables Dragos Platform notification data to be viewed in Microsoft Sentinel so that security analysts are able to triage potential cyber security events occurring in their industrial environments. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Dragos Sitestore API access**: A Sitestore user account that has the `notification:read` permission. This account also needs to have an API key that can be provided to Sentinel. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Please provide the following information to allow Microsoft Sentinel to connect to your Dragos Sitestore. +- **Dragos Sitestore Hostname**: dragossitestore.example.com +- **Dragos Sitestore API Key ID**: Enter the API key ID. +- **Dragos Sitestore API Key Secret**: (password field) +- **Minimum Notification Severity. Valid values are 0-5 inclusive. Ensure less than or equal to maximum severity.**: Enter the min severity (recommend 0 for all notifications) +- **Maximum Notification Severity. Valid values are 0-5 inclusive. 
Ensure greater than or equal to minimum severity.**: Enter the max severity (recommend 5 for all notifications) +- Click 'Connect to Sitestore' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/druvaeventccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/druvaeventccpdefinition.md index 2497ab0058c..64edb79c08d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/druvaeventccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/druvaeventccpdefinition.md @@ -10,4 +10,33 @@ Provides capability to ingest the Druva events from Druva APIs +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permission are required + +**Custom Permissions:** +- **Druva API Access**: Druva API requires a client id and client secret to authenticate + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>Note: Configurations to connect to Druva Rest API + +Step 1: Create credentials from Druva console. Refer this doc for steps:- https://help.druva.com/en/articles/8580838-create-and-manage-api-credentials + +Step 2: Enter the hostname. For public cloud its apis.druva.com + +Step 3: Enter client id and client secret key + +**4. 
Connect to Druva API to start collecting logs in Microsoft Sentinel** + +Provide required values: +- **Hostname**: Example: apis.druva.com +- **OAuth Configuration**: + - Client ID + - Client Secret + - Click 'Connect' to authenticate + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/dynamics365.md b/Tools/Solutions Analyzer/connector-docs/connectors/dynamics365.md index 0ab3b56c1f8..034ae8ec986 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/dynamics365.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/dynamics365.md @@ -10,4 +10,20 @@ The Dynamics 365 Common Data Service (CDS) activities connector provides insight into admin, user, and support activities, as well as Microsoft Social Engagement logging events. By connecting Dynamics 365 CRM logs into Microsoft Sentinel, you can view this data in workbooks, use it to create custom alerts, and improve your investigation process. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com//fwlink/p/?linkid=2226719&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **Tenant Permissions**: 'Security Administrator' or 'Global Administrator' on the workspace's tenant. +- **License**: [Microsoft Dynamics 365 production license](https://docs.microsoft.com/office365/servicedescriptions/microsoft-dynamics-365-online-service-description) (This connector is available for production environments only, not for sandbox). Also, a Microsoft 365 Enterprise [E3 or E5](https://docs.microsoft.com/power-platform/admin/enable-use-comprehensive-auditing#requirements) subscription is required for Activity Logging. 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Connect [Dynamics 365 CRM](https://aka.ms/Sentinel/Dynamics365) activity logs to your Microsoft Sentinel workspace. +- Connect Dynamics365 + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/dynamics365finance.md b/Tools/Solutions Analyzer/connector-docs/connectors/dynamics365finance.md index bb0ae99f762..880e34f70dd 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/dynamics365finance.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/dynamics365finance.md @@ -14,4 +14,81 @@ Dynamics 365 for Finance and Operations is a comprehensive Enterprise Resource P The Dynamics 365 Finance and Operations data connector ingests Dynamics 365 Finance and Operations admin activities and audit logs as well as user business process and application activities logs into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Microsoft Entra app registration**: Application client ID and secret used to access Dynamics 365 Finance and Operations. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>Connectivity to Finance and Operations requires a Microsoft Entra app registration (client ID and secret). You'll also need the Microsoft Entra tenant ID and the Finance Operations Organization URL. + +To enable data collection, create a role in Dynamics 365 Finance and Operations with permissions to view the Database Log entity. Assign this role to a dedicated Finance and Operations user, mapped to the client ID of a Microsoft Entra app registration. Follow these steps to complete the process: + +**2. Step 1 - Microsoft Entra app registration** + +1. Navigate to the [Microsoft Entra portal](https://entra.microsoft.com). +2. Under Applications, click on **App Registrations** and create a new app registration (leave all defaults). +3. Open the new app registration and create a new secret. +4. Retain the **Tenant ID**, **Application (client) ID**, and **Client secret** for later use. + +**3. Step 2 - Create a role for data collection in Finance and Operations** + +1. In the Finance and Operations portal, navigate to **Workspaces > System administration** and click **Security Configuration** +2. Under **Roles** click **Create new** and give the new role a name e.g. Database Log Viewer. +3. Select the new role in the list of roles and click **Privileges** and then **Add references**. +4. Select **Database log Entity View** from the list of privileges. +5. Click on **Unpublished objects** and then **Publish all** to publish the role. + +**4. Step 3 - Create a user for data collection in Finance and Operations** + +1. In the Finance and Operations portal, navigate to **Modules > System administration** and click **Users** +2. Create a new user and assign the role created in the previous step to the user. + +**5. Step 4 - Register the Microsoft Entra app in Finance and Operations** + +1. In the F&O portal, navigate to **System administration > Setup > Microsoft Entra applications** (Azure Active Directory applications) +2. Create a new entry in the table. 
In the **Client Id** field, enter the application ID of the app registered in Step 1. +3. In the **Name** field, enter a name for the application. +4. In the **User ID** field, select the user ID created in the previous step. + +**6. Connect events from Dynamics 365 Finance and Operations to Microsoft Sentinel** + +Connect using client credentials +**Dynamics 365 Finance and Operations connection** + +When you click the "Add environment" button in the portal, a configuration form will open. You'll need to provide: + +*Environment details* + +- **Microsoft Entra tenant ID.** (optional): Tenant ID (GUID) +- **App registration client ID** (optional): Finance and Operations client ID +- **App registration client secret** (optional): Finance and Operations client secret +- **Finance and Operations organization URL** (optional): https://dynamics-dev.axcloud.dynamics.com + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + + +**7. Organizations** + +Each row represents a Finance and Operations connection +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Environment URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceattacks.md b/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceattacks.md index 16c1019ab77..859e484ecc0 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceattacks.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceattacks.md @@ -10,4 +10,23 @@ This connector uses the Dynatrace Attacks REST API to ingest detected attacks into Microsoft Sentinel Log Analytics +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Dynatrace tenant (ex. xyz.dynatrace.com)**: You need a valid Dynatrace tenant with [Application Security](https://www.dynatrace.com/platform/application-security/) enabled, learn more about the [Dynatrace platform](https://www.dynatrace.com/). +- **Dynatrace Access Token**: You need a Dynatrace Access Token, the token should have ***Read attacks*** (attacks.read) scope. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Dynatrace Attack Events to Microsoft Sentinel** + +Configure and Enable Dynatrace [Application Security](https://www.dynatrace.com/platform/application-security/). + Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceauditlogs.md b/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceauditlogs.md index 650ed8f12be..a93a397de29 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceauditlogs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceauditlogs.md @@ -10,4 +10,23 @@ This connector uses the [Dynatrace Audit Logs REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/audit-logs) to ingest tenant audit logs into Microsoft Sentinel Log Analytics +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Dynatrace tenant (ex. xyz.dynatrace.com)**: You need a valid Dynatrace Tenant, to learn more about the Dynatrace platform [Start your free trial](https://www.dynatrace.com/trial). +- **Dynatrace Access Token**: You need a Dynatrace Access Token, the token should have ***Read audit logs*** (auditLogs.read) scope. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Dynatrace Audit Log Events to Microsoft Sentinel** + +Enable Dynatrace Audit [Logging](https://docs.dynatrace.com/docs/shortlink/audit-logs#enable-audit-logging). + Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceproblems.md b/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceproblems.md index dd29b6740c5..049b1a018b9 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceproblems.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceproblems.md @@ -10,4 +10,22 @@ This connector uses the [Dynatrace Problem REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/problems-v2) to ingest problem events into Microsoft Sentinel Log Analytics +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Dynatrace tenant (ex. xyz.dynatrace.com)**: You need a valid Dynatrace Tenant, to learn more about the Dynatrace platform [Start your free trial](https://www.dynatrace.com/trial). +- **Dynatrace Access Token**: You need a Dynatrace Access Token, the token should have ***Read problems*** (problems.read) scope. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Dynatrace Problem Events to Microsoft Sentinel** + +Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceruntimevulnerabilities.md b/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceruntimevulnerabilities.md index a7806a4830c..d19f48fd581 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceruntimevulnerabilities.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/dynatraceruntimevulnerabilities.md @@ -10,4 +10,23 @@ This connector uses the [Dynatrace Security Problem REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/application-security/vulnerabilities/get-vulnerabilities) to ingest detected runtime vulnerabilities into Microsoft Sentinel Log Analytics. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Dynatrace tenant (ex. xyz.dynatrace.com)**: You need a valid Dynatrace tenant with [Application Security](https://www.dynatrace.com/platform/application-security/) enabled, learn more about the [Dynatrace platform](https://www.dynatrace.com/). +- **Dynatrace Access Token**: You need a Dynatrace Access Token, the token should have ***Read security problems*** (securityProblems.read) scope. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Dynatrace Vulnerabilities Events to Microsoft Sentinel** + +Configure and Enable Dynatrace [Application Security](https://www.dynatrace.com/platform/application-security/). + Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. 
Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/egressdefendpolling.md b/Tools/Solutions Analyzer/connector-docs/connectors/egressdefendpolling.md index 98c727e1738..031c127234b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/egressdefendpolling.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/egressdefendpolling.md @@ -10,4 +10,21 @@ The Egress Defend audit connector provides the capability to ingest Egress Defend Data into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions on the Log Analytics workspace are required to enable the data connector. + +**Custom Permissions:** +- **Egress API Token**: An Egress API token is required to ingest audit records to Microsoft Sentinel. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Egress Defend with Microsoft Sentinel** + +Enter your Egress Defend API URL, Egress Domain and API token. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/egresssiempolling.md b/Tools/Solutions Analyzer/connector-docs/connectors/egresssiempolling.md index 88066903391..1f44c9db03d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/egresssiempolling.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/egresssiempolling.md @@ -10,4 +10,21 @@ The Egress Iris connector will allow you to ingest Egress data into Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions on the Log Analytics workspace are required to enable the data connector. + +**Custom Permissions:** +- **Egress API Token**: An Egress API token is required to ingest audit records to Microsoft Sentinel. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Egress Data with Microsoft Sentinel** + +Enter your Egress API Hostname and secret. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/elasticagent.md b/Tools/Solutions Analyzer/connector-docs/connectors/elasticagent.md index 0f53e7a1414..51aa30c4ef4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/elasticagent.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/elasticagent.md @@ -10,4 +10,75 @@ The [Elastic Agent](https://www.elastic.co/security) data connector provides the capability to ingest Elastic Agent logs, metrics, and security data into Microsoft Sentinel. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Include custom pre-requisites if the connectivity requires - else delete customs**: Description for any custom pre-requisite + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ElasticAgentEvent**](https://aka.ms/sentinel-ElasticAgent-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using **Elastic Agent 7.14**. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server where the Elastic Agent logs are forwarded. + +> Logs from Elastic Agents deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. 
+ - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure Elastic Agent (Standalone)** + +[Follow the instructions](https://www.elastic.co/guide/en/fleet/current/elastic-agent-configuration.html) to configure Elastic Agent to output to Logstash + +**3. Configure Logstash to use Microsoft Logstash Output Plugin** + +Follow the steps to configure Logstash to use microsoft-logstash-output-azure-loganalytics plugin: + +3.1) Check if the plugin is already installed: +> ./logstash-plugin list | grep 'azure-loganalytics' +**(if the plugin is installed go to step 3.3)** + +3.2) Install plugin: +> ./logstash-plugin install microsoft-logstash-output-azure-loganalytics + +3.3) [Configure Logstash](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/microsoft-logstash-output-azure-loganalytics) to use the plugin + +**4. Validate log ingestion** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using custom table specified in step 3.3 (e.g. ElasticAgentLogs_CL). + +>It may take about 30 minutes until the connection streams data to your workspace. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ermesbrowsersecurityevents.md b/Tools/Solutions Analyzer/connector-docs/connectors/ermesbrowsersecurityevents.md index f84e7db974c..c970bef9ad4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ermesbrowsersecurityevents.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ermesbrowsersecurityevents.md @@ -10,4 +10,25 @@ Ermes Browser Security Events +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Ermes Client Id and Client Secret**: Enable API access in Ermes. Please contact [Ermes Cyber Security](https://www.ermes.company) support for more information. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Ermes Browser Security Events to Microsoft Sentinel** + +Connect using OAuth2 credentials +- **OAuth Configuration**: + - Client ID + - Client Secret + - Click 'Connect' to authenticate + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/esetinspect.md b/Tools/Solutions Analyzer/connector-docs/connectors/esetinspect.md index 28fdaff26aa..bb1d3c0ca64 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/esetinspect.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/esetinspect.md @@ -10,4 +10,53 @@ This connector will ingest detections from [ESET Inspect](https://www.eset.com/int/business/solutions/xdr-extended-detection-and-response/) using the provided [REST API](https://help.eset.com/ei_navigate/latest/en-US/api.html). This API is present in ESET Inspect version 1.4 and later. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Access to the ESET PROTECT console**: Permissions to add users + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to ESET Inspect to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**1. Step 1 - Create an API user** + +1. Log into the ESET PROTECT console with an administrator account, select the **More** tab and the **Users** subtab. +2. Click on the **ADD NEW** button and add a **native user**. +3. Create a new user for the API account. **Optional:** Select a **Home group** other than **All** to limit what detections are ingested. +4. Under the **Permission Sets** tab, assign the **Inspect reviewer permission set**. +4. Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. +5. + +**2. 
Step 2 - Copy Workspace ID and Key** + +>**IMPORTANT:** Before deploying the ESET Inspect connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Step 3 - Deploy the Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the ESET Inspect connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESETInspect-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, enter the **Inspect base URL** and the **first ID** to start ingesting detections from. + - The default starting ID is **0**. This means that all detections will be ingested. + - Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labelled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/esetprotect.md b/Tools/Solutions Analyzer/connector-docs/connectors/esetprotect.md index e823f6a0ac0..4b8f22b1465 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/esetprotect.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/esetprotect.md @@ -10,4 +10,52 @@ This connector gathers all events generated by ESET software through the central management solution ESET PROTECT (formerly ESET Security Management Center). This includes Anti-Virus detections, Firewall detections but also more advanced EDR detections. For a complete list of events please refer to [the documentation](https://help.eset.com/protect_admin/latest/en-US/events-exported-to-json-format.html). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ESETPROTECT and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESETPROTECT/Parsers/ESETPROTECT.txt).The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. 
+**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. Select **Apply below configuration to my machines** and select the facilities and severities. The default ESET PROTECT facility is **user**. +3. Click **Save**. +- **Open Syslog settings** + +**3. Configure ESET PROTECT** + +Configure ESET PROTECT to send all events through Syslog. + +1. Follow [these instructions](https://help.eset.com/protect_admin/latest/en-US/admin_server_settings_syslog.html) to configure syslog output. Make sure to select **BSD** as the format and **TCP** as the transport. + +2. Follow [these instructions](https://help.eset.com/protect_admin/latest/en-US/admin_server_settings_export_to_syslog.html) to export all logs to syslog. Select **JSON** as the output format. + +Note:- Refer to the [documentation](https://learn.microsoft.com/en-us/azure/sentinel/connect-log-forwarder?tabs=rsyslog#security-considerations) for setting up the log forwarder for both local and cloud storage. 
+- **Open Syslog settings** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/esetprotectplatform.md b/Tools/Solutions Analyzer/connector-docs/connectors/esetprotectplatform.md index a5d02fef39b..6f55ab0c949 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/esetprotectplatform.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/esetprotectplatform.md @@ -10,4 +10,46 @@ The ESET Protect Platform data connector enables users to inject detections data from [ESET Protect Platform](https://www.eset.com/int/business/protect-platform/) using the provided [Integration REST API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors). Integration REST API runs as scheduled Azure Function App. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Permission to register an application in Microsoft Entra ID**: Sufficient permissions to register an application with your Microsoft Entra tenant are required. +- **Permission to assign a role to the registered application**: Permission to assign the Monitoring Metrics Publisher role to the registered application in Microsoft Entra ID is required. 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** The ESET Protect Platform data connector uses Azure Functions to connect to the ESET Protect Platform via Eset Connect API to pull detections logs into Microsoft Sentinel. This process might result in additional data ingestion costs. See details on the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/). + +>**NOTE:** The newest version of the ESET PROTECT Platform and Microsoft Sentinel integration pulls not only detections logs but also newly created incidents. If your integration was set up before 20.06.2025, please follow [these steps](https://help.eset.com/eset_connect/en-US/update_ms_sentinel_integration.html) to update it. + +**1. Step 1 - Create an API user** + +Use this [instruction](https://help.eset.com/eset_connect/en-US/create_api_user_account.html) to create an ESET Connect API User account with **Login** and **Password**. + +**2. Step 2 - Create a registered application** + +Create a Microsoft Entra ID registered application by following the steps in the [Register a new application instruction.](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app) + +**3. Step 3 - Deploy the ESET Protect Platform data connector using the Azure Resource Manager (ARM) template** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-EsetProtectionPlatform-azuredeploy) + +2. Select the name of the **Log Analytics workspace** associated with your Microsoft Sentinel. Select the same **Resource Group** as the Resource Group of the Log Analytics workspace. + +3. 
Type the parameters of the registered application in Microsoft Entra ID: **Azure Client ID**, **Azure Client Secret**, **Azure Tenant ID**, **Object ID**. You can find the **Object ID** on Azure Portal by following this path +> Microsoft Entra ID -> Manage (on the left-side menu) -> Enterprise applications -> Object ID column (the value next to your registered application name). + +4. Provide the ESET Connect API user account **Login** and **Password** obtained in **Step 1**. + +5. Select one or more ESET products (ESET PROTECT, ESET Inspect, ESET Cloud Office Security) from which detections are retrieved. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/esetsmc.md b/Tools/Solutions Analyzer/connector-docs/connectors/esetsmc.md index 0d04c323908..d2c4444a50f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/esetsmc.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/esetsmc.md @@ -10,4 +10,130 @@ Connector for [Eset SMC](https://help.eset.com/esmc_admin/72/en-US/) threat events, audit logs, firewall events and web sites filter. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Access to Eset SMC console**: Permissions to configure log export + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure rsyslog to accept logs from your Eset SMC IP address. + +``` +sudo -i + +# Set ESET SMC source IP address +export ESETIP={Enter your IP address} + +# Create rsyslog configuration file +cat > /etc/rsyslog.d/80-remote.conf << EOF +\$ModLoad imudp +\$UDPServerRun 514 +\$ModLoad imtcp +\$InputTCPServerRun 514 +\$AllowedSender TCP, 127.0.0.1, $ESETIP +\$AllowedSender UDP, 127.0.0.1, $ESETIP +user.=alert;user.=crit;user.=debug;user.=emerg;user.=err;user.=info;user.=notice;user.=warning @127.0.0.1:25224 +EOF + +# Restart rsyslog +systemctl restart rsyslog``` + +**3. Configure OMS agent to pass Eset SMC data in API format** + +In order to easily recognize Eset data we will push it to separate table and parse at agent so query in Azure Sentinel is easier and fast. To make it simple we will just modify ```match oms.**``` section to send data as API objects by changing type to out_oms_api. Modify file on /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/conf/omsagent.conf. 
Full ```match oms.**``` section looks like this: + +``` + + type out_oms_api + log_level info + num_threads 5 + run_in_background false + + omsadmin_conf_path /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/conf/omsadmin.conf + cert_path /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/certs/oms.crt + key_path /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/certs/oms.key + + buffer_chunk_limit 15m + buffer_type file + buffer_path /var/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/state/out_oms_common*.buffer + + buffer_queue_limit 10 + buffer_queue_full_action drop_oldest_chunk + flush_interval 20s + retry_limit 10 + retry_wait 30s + max_retry_wait 9m + +``` + +**4. Change OMS agent configuration to catch tag oms.api.eset and parse structured data** + +Modify file /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/conf/omsagent.d/syslog.conf +``` + + type syslog + port 25224 + bind 127.0.0.1 + protocol_type udp + tag oms.api.eset + + + + @type parser + key_name message + format /(?.*?{.*})/ + + + + @type parser + key_name message + format json + +``` + +**5. Disable automatic configuration and restart agent** + +```bash +# Disable changes to configuration files from Portal +sudo su omsagent -c 'python /opt/microsoft/omsconfig/Scripts/OMS_MetaConfigHelper.py --disable' + +# Restart agent +sudo /opt/microsoft/omsagent/bin/service_control restart + +# Check agent logs +tail -f /var/opt/microsoft/omsagent/log/omsagent.log +``` + +**6. Configure Eset SMC to send logs to connector** + +Configure Eset Logs using BSD style and JSON format. 
+- Go to Syslog server configuration as described in [Eset documentation](https://help.eset.com/esmc_admin/72/en-US/admin_server_settings.html?admin_server_settings_syslog.html) and configure Host (your connector), Format BSD, Transport TCP +- Go to Logging section as described in [Eset documentation](https://help.eset.com/esmc_admin/72/en-US/admin_server_settings.html?admin_server_settings_export_to_syslog.html) and enable JSON + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/esi-exchangeadminauditlogevents.md b/Tools/Solutions Analyzer/connector-docs/connectors/esi-exchangeadminauditlogevents.md index f41b951e8f6..c93bab2c885 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/esi-exchangeadminauditlogevents.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/esi-exchangeadminauditlogevents.md @@ -10,4 +10,523 @@ Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Detailed documentation**: >**NOTE:** Detailed documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This solution is based on options. This allows you to choose which data will be ingested as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each option is independent of the others. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions) + +**1. Download and install the agents needed to collect logs for Microsoft Sentinel** + +Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy. +**Deploy Monitor Agents** + + This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers + **Select which agent you want to install in your servers to collect logs:** + +**[Preferred] Azure Monitor Agent via Azure Arc** + + **Deploy the Azure Arc Agent** +> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc) + + **Install Azure Log Analytics Agent (Deprecated on 31/08/2024)** + + 1. Download the Azure Log Analytics Agent and choose the deployment method in the below link. + - **Install/configure: InstallAgentOnNonAzure** + +**2. 
Deploy log ingestion following chosen options** +**[Option 1] MS Exchange Management Log collection** + + Select how to stream MS Exchange Admin Audit event logs + **MS Exchange Admin Audit event logs** + +**Data Collection Rules - When Azure Monitor Agent is used** + + **Enable data collection rule** +> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents. +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the DCR. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace Name** 'and/or Other required fields'. +>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Automation** + + Use the following step-by-step instructions to deploy manually a Data Collection Rule. +**A. Create DCR, Type Event log** + + 1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules). +2. Click **+ Create** at the top. +3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. +4. In the **Resources** tab, enter your Exchange Servers. +5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it. +6. 'Make other preferable configuration changes', if needed, then click **Create**. 
+ + **Assign the DCR to all Exchange Servers** + + Add all your Exchange Servers to the DCR + + **Data Collection Rules - When the legacy Azure Log Analytics Agent is used** + + **Configure the logs to be collected** + +Configure the Events you want to collect and their severities. + +1. Under workspace **Legacy agents management**, select **Windows Event logs**. +2. Click **Add Windows event log** and enter **MSExchange Management** as log name. +3. Collect Error, Warning and Information types +4. Click **Save**. + - **Open Syslog settings** +**[Option 2] Security/Application/System logs of Exchange Servers** + + Select how to stream Security/Application/System logs of Exchange Servers + **Security Event log collection** + +**Data Collection Rules - Security Event logs** + + **Enable data collection rule for Security Logs** +Security Events logs are collected only from **Windows** agents. +1. Add Exchange Servers on *Resources* tab. +2. Select Security log level + +> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition. + - **Create data collection rule** + **Application and System Event log collection** + +**Data Collection Rules - When Azure Monitor Agent is used** + + **Enable data collection rule** +> Application and System Events logs are collected only from **Windows** agents. +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the DCR. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace Name** 'and/or Other required fields'. +>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. 
+ + **Option 2 - Manual Deployment of Azure Automation** + + Use the following step-by-step instructions to deploy manually a Data Collection Rule. +**A. Create DCR, Type Event log** + + 1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules). +2. Click **+ Create** at the top. +3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. +4. In the **Resources** tab, enter you Exchange Servers. +5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option. +6. For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. +7. 'Make other preferable configuration changes', if needed, then click **Create**. + + **Assign the DCR to all Exchange Servers** + + Add all your Exchange Servers to the DCR + + **Data Collection Rules - When the legacy Azure Log Analytics Agent is used** + + **Configure the logs to be collected** + +Configure the Events you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**. +2. Click **Add Windows event log** and search **Application** as log name. +3. Click **Add Windows event log** and search **System** as log name. +4. Collect Error (for all), Warning (for all) and Information (for System) types +5. Click **Save**. + - **Open Syslog settings** +**[Option 3 and 4] Security logs of Domain Controllers** + + Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. If you want to implement Option 4, you can select all DCs of your forest. 
+**[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step** + + **This limits the quantity of data injested but some incident can't be detected.** + + **[Option 4] List all Domain Controllers of your Active-Directory Forest for next step** + + **This allows collecting all security events** + **Security Event log collection** + +**Data Collection Rules - Security Event logs** + + **Enable data collection rule for Security Logs** +Security Events logs are collected only from **Windows** agents. +1. Add chosen DCs on *Resources* tab. +2. Select Security log level + +> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition. + - **Create data collection rule** +**[Option 5] IIS logs of Exchange Servers** + + Select how to stream IIS logs of Exchange Servers +**Data Collection Rules - When Azure Monitor Agent is used** + + **Enable data collection rule** +> IIS logs are collected only from **Windows** agents. + > 📋 **Additional Configuration Step**: This connector includes a configuration step of type `AdminAuditEvents`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the DCE and DCR. +**A. Create DCE (If not already created for Exchange Servers)** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. You can change the proposed name of the DCE. +5. Click **Create** to deploy. + + **B. Deploy Data Connection Rule** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy) +2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID** 'and/or Other required fields'. +>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Automation** + + Use the following step-by-step instructions to deploy manually a Data Collection Rule. +**A. Create DCE (If not already created for Exchange Servers)** + + 1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints). +2. Click **+ Create** at the top. +3. In the **Basics** tab, fill the required fields and give a name to the DCE. +3. 'Make other preferable configuration changes', if needed, then click **Create**. + + **B. Create DCR, Type IIS log** + + 1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules). +2. Click **+ Create** at the top. +3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. +4. In the **Resources** tab, enter you Exchange Servers. +5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source' +6. 'Make other preferable configuration changes', if needed, then click **Create**. + + **Assign the DCR to all Exchange Servers** + + Add all your Exchange Servers to the DCR + + **Data Collection Rules - When the legacy Azure Log Analytics Agent is used** + + **Configure the logs to be collected** + +Configure the Events you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**. +2. Check **Collect W3C format IIS log files** +5. Click **Save**. 
+ - **Open Syslog settings** +**[Option 6] Message Tracking of Exchange Servers** + + Select how to stream Message Tracking of Exchange Servers +**Data Collection Rules - When Azure Monitor Agent is used** + + **Enable data collection rule** +> Message Tracking are collected only from **Windows** agents. + + ℹ️ **Attention**, Custom logs in Monitor Agent is in Preview. The deployment doesn't work as expected for the moment (March 2023). +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the DCE and DCR. +**A. Create DCE (If not already created for Exchange Servers)** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. You can change the proposed name of the DCE. +5. Click **Create** to deploy. + + **B. Deploy Data Connection Rule and Custom Table** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID** 'and/or Other required fields'. +>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Automation** + + Use the following step-by-step instructions to deploy manually a Data Collection Rule. +**A. Create DCE (If not already created for Exchange Servers)** + + 1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints). +2. Click **+ Create** at the top. +3. In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. +3. 
'Make other preferable configuration changes', if needed, then click **Create**. + + **B. Create Custom DCR Table** + + 1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile). +2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace. +3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**. +4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint. +5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**. +6. In the transformation field, enter the following KQL request : +*source +| extend TimeGenerated = todatetime(['date-time']) +| extend + clientHostname = ['client-hostname'], + clientIP = ['client-ip'], + connectorId = ['connector-id'], + customData = ['custom-data'], + eventId = ['event-id'], + internalMessageId = ['internal-message-id'], + logId = ['log-id'], + messageId = ['message-id'], + messageInfo = ['message-info'], + messageSubject = ['message-subject'], + networkMessageId = ['network-message-id'], + originalClientIp = ['original-client-ip'], + originalServerIp = ['original-server-ip'], + recipientAddress= ['recipient-address'], + recipientCount= ['recipient-count'], + recipientStatus= ['recipient-status'], + relatedRecipientAddress= ['related-recipient-address'], + returnPath= ['return-path'], + senderAddress= ['sender-address'], + senderHostname= ['server-hostname'], + serverIp= ['server-ip'], + sourceContext= ['source-context'], + schemaVersion=['schema-version'], + messageTrackingTenantId = ['tenant-id'], + totalBytes = ['total-bytes'], + transportTrafficType = 
['transport-traffic-type'] +| project-away + ['client-ip'], + ['client-hostname'], + ['connector-id'], + ['custom-data'], + ['date-time'], + ['event-id'], + ['internal-message-id'], + ['log-id'], + ['message-id'], + ['message-info'], + ['message-subject'], + ['network-message-id'], + ['original-client-ip'], + ['original-server-ip'], + ['recipient-address'], + ['recipient-count'], + ['recipient-status'], + ['related-recipient-address'], + ['return-path'], + ['sender-address'], + ['server-hostname'], + ['server-ip'], + ['source-context'], + ['schema-version'], + ['tenant-id'], + ['total-bytes'], + ['transport-traffic-type']* + +8. Click 'Run' and after 'Apply'. +9. Click **Next**, then click **Create**. + + **C. Modify the created DCR, Type Custom log** + + 1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules). +2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**. +3. In the **Resources** tab, enter you Exchange Servers. +4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\Program Files\Microsoft\Exchange Server\V15\TransportRoles\Logs\MessageTracking\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name. 
+6.in Transform field, enter the following KQL request : +*source +| extend TimeGenerated = todatetime(['date-time']) +| extend + clientHostname = ['client-hostname'], + clientIP = ['client-ip'], + connectorId = ['connector-id'], + customData = ['custom-data'], + eventId = ['event-id'], + internalMessageId = ['internal-message-id'], + logId = ['log-id'], + messageId = ['message-id'], + messageInfo = ['message-info'], + messageSubject = ['message-subject'], + networkMessageId = ['network-message-id'], + originalClientIp = ['original-client-ip'], + originalServerIp = ['original-server-ip'], + recipientAddress= ['recipient-address'], + recipientCount= ['recipient-count'], + recipientStatus= ['recipient-status'], + relatedRecipientAddress= ['related-recipient-address'], + returnPath= ['return-path'], + senderAddress= ['sender-address'], + senderHostname= ['server-hostname'], + serverIp= ['server-ip'], + sourceContext= ['source-context'], + schemaVersion=['schema-version'], + messageTrackingTenantId = ['tenant-id'], + totalBytes = ['total-bytes'], + transportTrafficType = ['transport-traffic-type'] +| project-away + ['client-ip'], + ['client-hostname'], + ['connector-id'], + ['custom-data'], + ['date-time'], + ['event-id'], + ['internal-message-id'], + ['log-id'], + ['message-id'], + ['message-info'], + ['message-subject'], + ['network-message-id'], + ['original-client-ip'], + ['original-server-ip'], + ['recipient-address'], + ['recipient-count'], + ['recipient-status'], + ['related-recipient-address'], + ['return-path'], + ['sender-address'], + ['server-hostname'], + ['server-ip'], + ['source-context'], + ['schema-version'], + ['tenant-id'], + ['total-bytes'], + ['transport-traffic-type']* +7. Click on 'Add data source'. + + **Assign the DCR to all Exchange Servers** + + Add all your Exchange Servers to the DCR + + **Data Collection Rules - When the legacy Azure Log Analytics Agent is used** + + **Configure the logs to be collected** + +1. 
Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**. +2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next +3. Select type **Windows** and enter the path **C:\Program Files\Microsoft\Exchange Server\V15\TransportRoles\Logs\MessageTracking\*.log**. Click Next. +4. Enter **MessageTrackingLog** as Table name and click Next. +5. Click **Save**. + - **Open Syslog settings** +**[Option 7] HTTP Proxy of Exchange Servers** + + Select how to stream HTTP Proxy of Exchange Servers +**Data Collection Rules - When Azure Monitor Agent is used** + + **Enable data collection rule** +> Message Tracking are collected only from **Windows** agents. + + ℹ️ **Attention**, Custom logs in Monitor Agent is in Preview. The deployment doesn't work as expected for the moment (March 2023). +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the DCE and DCR. +**A. Create DCE (If not already created for Exchange Servers)** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. You can change the proposed name of the DCE. +5. Click **Create** to deploy. + + **B. Deploy Data Connection Rule** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID** 'and/or Other required fields'. +>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. 
+ + **Option 2 - Manual Deployment of Azure Automation** + + Use the following step-by-step instructions to deploy manually a Data Collection Rule. +**A. Create DCE (If not already created for Exchange Servers)** + + 1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints). +2. Click **+ Create** at the top. +3. In the **Basics** tab, fill the required fields and give a name to the DCE. +3. 'Make other preferable configuration changes', if needed, then click **Create**. + + **B. Create Custom DCR Table** + + 1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile). +2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace. +3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**. +4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint. +5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**. +6. In the transformation field, enter the following KQL request : +*source +| extend TimeGenerated = todatetime(DateTime) +| project-away DateTime +* + +8. Click 'Run' and after 'Apply'. +9. Click **Next**, then click **Create**. + + **C. Modify the created DCR, Type Custom log** + + 1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules). +2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**. +3. 
In the **Resources** tab, enter you Exchange Servers. +4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Autodiscover\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name. +6.in Transform field, enter the following KQL request : +*source +| extend TimeGenerated = todatetime(DateTime) +| project-away DateTime* +7. Click on 'Add data source'. + + **Assign the DCR to all Exchange Servers** + + Add all your Exchange Servers to the DCR + + **Data Collection Rules - When the legacy Azure Log Analytics Agent is used** + + **Configure the logs to be collected** + +1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**. +2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next +3. Select type **Windows** and enter all the following paths **C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Autodiscover\*.log**, **C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Eas\*.log**, **C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Ecp\*.log**, **C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Ews\*.log**, **C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Mapi\*.log**, **C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Oab\*.log**, **C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Owa\*.log**, **C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\OwaCalendar\*.log**, **C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\PowerShell\*.log** and **C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\RpcHttp\*.log** . Click Next. +4. Enter **ExchangeHttpProxy** as Table name and click Next. +5. Click **Save**. 
+ - **Open Syslog settings** + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser) +**Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below** + +**Manual Parser Deployment** +**1. Download the Parser file** + + The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser) + + **2. Create Parser **ExchangeAdminAuditLogs** function** + + In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer + + **3. Save Parser **ExchangeAdminAuditLogs** function** + + Click on save button. + No parameter is needed for this parser. +Click save again. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/esi-exchangeonlinecollector.md b/Tools/Solutions Analyzer/connector-docs/connectors/esi-exchangeonlinecollector.md index 7e541e873b8..d03fe569d4b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/esi-exchangeonlinecollector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/esi-exchangeonlinecollector.md @@ -10,4 +10,159 @@ Connector used to push Exchange Online Security configuration for Microsoft Sentinel Analysis +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **microsoft.automation/automationaccounts permissions**: Read and write permissions to create an Azure Automation with a Runbook is required. [See the documentation to learn more about Automation Account](https://learn.microsoft.com/en-us/azure/automation/overview). +- **Microsoft.Graph permissions**: Groups.Read, Users.Read and Auditing.Read permissions are required to retrieve user/group information linked to Exchange Online assignments. [See the documentation to learn more](https://aka.ms/sentinel-ESI-OnlineCollectorPermissions). +- **Exchange Online permissions**: Exchange.ManageAsApp permission and **Global Reader** or **Security Reader** Role are needed to retrieve the Exchange Online Security Configuration.[See the documentation to learn more](https://aka.ms/sentinel-ESI-OnlineCollectorPermissions). +- **(Optional) Log Storage permissions**: Storage Blob Data Contributor to a storage account linked to the Automation Account Managed identity or an Application ID is mandatory to store logs.[See the documentation to learn more](https://aka.ms/sentinel-ESI-OnlineCollectorPermissions). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE - UPDATE** + +ℹ️

NOTE - UPDATE:

We recommend updating the Collector to Version 7.6.0.0 or higher.
The Collector Script Update procedure could be found here : ESI Online Collector Update + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Follow the steps for each Parser to create the Kusto Functions alias : [**ExchangeConfiguration**](https://aka.ms/sentinel-ESI-ExchangeConfiguration-Online-parser) and [**ExchangeEnvironmentList**](https://aka.ms/sentinel-ESI-ExchangeEnvironmentList-Online-parser) + +**STEP 1 - Parsers deployment** +**Parser deployment (When using Microsoft Exchange Security Solution, Parsers are automatically deployed)** + +**1. Download the Parser files** + + The latest version of the 2 files [**ExchangeConfiguration.yaml**](https://aka.ms/sentinel-ESI-ExchangeConfiguration-Online-parser) and [**ExchangeEnvironmentList.yaml**](https://aka.ms/sentinel-ESI-ExchangeEnvironmentList-Online-parser) + + **2. Create Parser **ExchangeConfiguration** function** + + In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer + + **3. Save Parser **ExchangeConfiguration** function** + + Click on save button. + Define the parameters as asked on the header of the parser file. +Click save again. + + **4. Reproduce the same steps for Parser **ExchangeEnvironmentList**** + + Reproduce the step 2 and 3 with the content of 'ExchangeEnvironmentList.yaml' file + +>**NOTE:** This connector uses Azure Automation to connect to 'Exchange Online' to pull its Security analysis into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Automation pricing page](https://azure.microsoft.com/pricing/details/automation/) for details. 
+ +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Automation** + +>**IMPORTANT:** Before deploying the 'ESI Exchange Online Security Configuration' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Exchange Online tenant name (contoso.onmicrosoft.com), readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the 'ESI Exchange Online Security Configuration' connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-ExchangeCollector-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **Tenant Name**, 'and/or Other required fields'. +>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**Option 2 - Manual Deployment of Azure Automation** + + Use the following step-by-step instructions to deploy the 'ESI Exchange Online Security Configuration' connector manually with Azure Automation. +**A. Create the Azure Automation Account** + + 1. From the Azure Portal, navigate to [Azure Automation Account](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.Automation%2FAutomationAccounts). +2. Click **+ Add** at the top. +3. In the **Basics** tab, fill the required fields and give a name to the Azure Automation. +4. 
In the **Advanced** and **Networking** and **Tags** Tabs, leave fields as default if you don't need to customize them. +5. 'Make other preferable configuration changes', if needed, then click **Create**. + + **B. Add Exchange Online Management Module, Microsoft Graph (Authentication, User and Group) Modules** + + 1. On the Automation Account page, select **Modules**. +2. Click on **Browse gallery** and search the **ExchangeOnlineManagement** module. +3. Select it and click on **Select**. +4. Choose Version **5.1** on Runtime version field and click on Import button. +Repeat the step for the following modules : 'Microsoft.Graph.Authentication', 'Microsoft.Graph.Users' and 'Microsoft.Graph.Groups. **Attention, you need to wait for Microsoft.Graph.Authentication installation before processing next modules** + + **C. Download the Runbook Content** + + 1. Download the latest version of ESI Collector. The latest version can be found here : https://aka.ms/ESI-ExchangeCollector-Script +2. Unzip the file to find the JSON file and the PS1 file for next step. + + **D. Create Runbook** + + 1. On the Automation Account page, select the **Runbooks** button. +2. Click on **Create a runbook** and name it like 'ESI-Collector' with a runbook type **PowerShell**, Runtime Version **5.1** and click 'Create'. +2. Import the content of the previous step's PS1 file in the Runbook window. +3. Click on **Publish** + + **E. Create GlobalConfiguration Variable** + + 1. On the Automation Account page, select the **Variables** button. +2. Click on **Add a Variable** and name it exaclty 'GlobalConfiguration' with a type **String**. +2. On 'Value' field, copy the content of the previous step's JSON file. +3. Inside the content, replace the values of **WorkspaceID** and **WorkspaceKey**. +4. Click on 'Create' button. + + **F. Create TenantName Variable** + + 1. On the Automation Account page, select the **Variables** button. +2. 
Click on **Add a Variable** and name it exaclty 'TenantName' with a type **String**. +3. On 'Value' field, write the tenant name of your Exchange Online. +4. Click on 'Create' button. + + **G. Create LastDateTracking Variable** + + 1. On the Automation Account page, select the **Variables** button. +2. Click on **Add a Variable** and name it exaclty 'LastDateTracking' with a type **String**. +3. On 'Value' field, write 'Never'. +4. Click on 'Create' button. + + **H. Create a Runbook Schedule** + + 1. On the Automation Account page, select the **Runbook** button and click on your created runbook. +2. Click on **Schedules** and **Add a schedule** button. +3. Click on **Schedule**, **Add a Schedule** and name it. Select **Recurring** value with a reccurence of every 1 day, click 'Create'. +4. Click on 'Configure parameters and run settings'. Leave all empty and click on **OK** and **OK** again. + +**STEP 3 - Assign Microsoft Graph Permission and Exchange Online Permission to Managed Identity Account** + +To be able to collect Exchange Online information and to be able to retrieve User information and memberlist of admin groups, the automation account need multiple permission. +**Assign Permissions by Script** + +**A. Download Permission Script** + + [Permission Update script](https://aka.ms/ESI-ExchangeCollector-Permissions) + + **B. Retrieve the Azure Automation Managed Identity GUID and insert it in the downloaded script** + + 1. Go to your Automation Account, in the **Identity** Section. You can find the Guid of your Managed Identity. +2. Replace the GUID in $MI_ID = "XXXXXXXXXXX" with the GUID of your Managed Identity. + + **C. Launch the script with a **Global-Administrator** account** + + **Attention this script requires MSGraph Modules and Admin Consent to access to your tenant with Microsoft Graph**. + The script will add 3 permissions to the Managed identity: + 1. Exchange Online ManageAsApp permission + 2. User.Read.All on Microsoft Graph API + 3. 
Group.Read.All on Microsoft Graph API + + **D. Exchange Online Role Assignment** + + 1. As a **Global Administrator**, go to **Roles and Administrators**. +2. Select **Global Reader** role or **Security Reader** and click to 'Add assignments'. +3. Click on 'No member selected' and search your Managed Identity account Name beginning by **the name of your automation account** like 'ESI-Collector'. Select it and click on 'Select'. +4. Click **Next** and validate the assignment by clicking **Assign**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/esi-exchangeonpremisescollector.md b/Tools/Solutions Analyzer/connector-docs/connectors/esi-exchangeonpremisescollector.md index 2c2df4b6218..29fd432b1b0 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/esi-exchangeonpremisescollector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/esi-exchangeonpremisescollector.md @@ -10,4 +10,75 @@ Connector used to push Exchange On-Premises Security configuration for Microsoft Sentinel Analysis +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Service Account with Organization Management role**: The service Account that launch the script as scheduled task needs to be Organization Management to be able to retrieve all the needed security Information. 
+- **Detailled documentation**: >**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Install the ESI Collector Script on a server with Exchange Admin PowerShell console** + +This is the script that will collect Exchange Information to push content in Microsoft Sentinel. +**Script Deployment** + +**Download the latest version of ESI Collector** + + The latest version can be found here : https://aka.ms/ESI-ExchangeCollector-Script. The file to download is CollectExchSecIns.zip + + **Copy the script folder** + + Unzip the content and copy the script folder on a server where Exchange PowerShell Cmdlets are present. + + **Unblock the PS1 Scripts** + + Click right on each PS1 Script and go to Properties tab. + If the script is marked as blocked, unblock it. You can also use the Cmdlet 'Unblock-File *.* in the unzipped folder using PowerShell. + + **Configure Network Access** + + Ensure that the script can contact Azure Analytics (*.ods.opinsights.azure.com). + +**2. Configure the ESI Collector Script** + +Be sure to be local administrator of the server. +In 'Run as Administrator' mode, launch the 'setup.ps1' script to configure the collector. + Fill the Log Analytics (Microsoft Sentinel) Workspace information. + Fill the Environment name or leave empty. By default, choose 'Def' as Default analysis. The other choices are for specific usage. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Schedule the ESI Collector Script (If not done by the Install Script due to lack of permission or ignored during installation)** + +The script needs to be scheduled to send Exchange configuration to Microsoft Sentinel. + We recommend to schedule the script once a day. + The account used to launch the Script needs to be member of the group Organization Management + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser) +**Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below** + +**Manual Parser Deployment** +**1. Download the Parser file** + + The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser) + + **2. Create Parser **ExchangeAdminAuditLogs** function** + + In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer + + **3. Save Parser **ExchangeAdminAuditLogs** function** + + Click on save button. + No parameter is needed for this parser. +Click save again. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt1exchangeadminauditlogsbyeventlogs.md b/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt1exchangeadminauditlogsbyeventlogs.md index c0539cb52fe..dbf996957c1 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt1exchangeadminauditlogsbyeventlogs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt1exchangeadminauditlogsbyeventlogs.md @@ -10,4 +10,85 @@ [Option 1] - Using Azure Monitor Agent - You can stream all Exchange Audit events from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Detailled documentation**: >**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. 
Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions) + +>This Data Connector is the **option 1** of the wiki. + +**1. Download and install the agents needed to collect logs for Microsoft Sentinel** + +Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy. +**Deploy Monitor Agents** + + This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers +**Deploy the Azure Arc Agent** +> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc) + +**2. [Option 1] MS Exchange Management Log collection - MS Exchange Admin Audit event logs by Data Collection Rules** + +The MS Exchange Admin Audit event logs are collected using Data Collection Rules (DCR) and allow to store all Administrative Cmdlets executed in an Exchange environment. +**DCR** + +**Data Collection Rules Deployment** + + **Enable data collection rule** +> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents. +**Option 1 - Azure Resource Manager (ARM) Template (Prefered)** + + Use this method for automated deployment of the DCR. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace Name** 'and/or Other required fields'. +>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. 
+ + **Option 2 - Manual Deployment of Azure Automation** + + Use the following step-by-step instructions to deploy manually a Data Collection Rule. +**A. Create DCR, Type Event log** + + 1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules). +2. Click **+ Create** at the top. +3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. +4. In the **Resources** tab, enter you Exchange Servers. +5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it. +6. 'Make other preferable configuration changes', if needed, then click **Create**. + + **Assign the DCR to all Exchange Servers** + + Add all your Exchange Servers to the DCR + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser) +**Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below** + +**Manual Parser Deployment** +**1. Download the Parser file** + + The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser) + + **2. Create Parser **ExchangeAdminAuditLogs** function** + + In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer + + **3. Save Parser **ExchangeAdminAuditLogs** function** + + Click on save button. + No parameter is needed for this parser. +Click save again. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt2exchangeserverseventlogs.md b/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt2exchangeserverseventlogs.md index f004efea803..2bba2646c68 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt2exchangeserverseventlogs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt2exchangeserverseventlogs.md @@ -10,4 +10,79 @@ [Option 2] - Using Azure Monitor Agent - You can stream all Exchange Security & Application Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to create custom alerts, and improve investigation. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Log Analytics will be deprecated**: Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc) +- **Detailled documentation**: >**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This solution is based on options. 
This allows you to choose which data will be ingested as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each option is independent from the others. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions) + +>This Data Connector is the **option 2** of the wiki. + +**1. Download and install the agents needed to collect logs for Microsoft Sentinel** + +Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy. +**Deploy Monitor Agents** + + This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers +**Deploy the Azure Arc Agent** +> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc) + +**2. [Option 2] Security/Application/System logs of Exchange Servers** + +The Security/Application/System logs of Exchange Servers are collected using Data Collection Rules (DCR). +**Security Event log collection** + +**Data Collection Rules - Security Event logs** + + **Enable data collection rule for Security Logs** +Security Events logs are collected only from **Windows** agents. +1. Add Exchange Servers on *Resources* tab. +2. Select Security log level + +> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition. + - **Create data collection rule** +**Application and System Event log collection** + +**Enable data collection rule** + + > Application and System Events logs are collected only from **Windows** agents. +**Option 1 - Azure Resource Manager (ARM) Template (Preferred method)** + + Use this method for automated deployment of the DCR. + +1. 
Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace Name** 'and/or Other required fields'. +>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Automation** + + Use the following step-by-step instructions to deploy manually a Data Collection Rule. +**A. Create DCR, Type Event log** + + 1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules). +2. Click **+ Create** at the top. +3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. +4. In the **Resources** tab, enter you Exchange Servers. +5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option. +6. For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. +7. 'Make other preferable configuration changes', if needed, then click **Create**. 
+ + **Assign the DCR to all Exchange Servers** + + Add all your Exchange Servers to the DCR + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt34domaincontrollerssecurityeventlogs.md b/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt34domaincontrollerssecurityeventlogs.md index 4599e879a08..cae80326708 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt34domaincontrollerssecurityeventlogs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt34domaincontrollerssecurityeventlogs.md @@ -10,4 +10,52 @@ [Option 3 & 4] - Using Azure Monitor Agent -You can stream a part or all Domain Controllers Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to create custom alerts, and improve investigation. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Detailled documentation**: >**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. 
Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each option is independent from the others. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions) + +>This Data Connector is the **option 3 and 4** of the wiki. + +**1. Download and install the agents needed to collect logs for Microsoft Sentinel** + +Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy. +**Deploy Monitor Agents** + + This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers +**Deploy the Azure Arc Agent** +> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc) + +**1. Security logs of Domain Controllers** + +Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DCs on the same site as Exchange Servers. If you want to implement Option 4, you can select all DCs of your forest. +**[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step** + + **This limits the quantity of data ingested but some incidents can't be detected.** + + **[Option 4] List all Domain Controllers of your Active-Directory Forest for next step** + + **This allows collecting all security events** +**Security Event log collection** + +**Data Collection Rules - Security Event logs** + + **Enable data collection rule for Security Logs** +Security Events logs are collected only from **Windows** agents. +1. Add chosen DCs on *Resources* tab. +2. Select Security log level + +> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition. 
+ - **Create data collection rule** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt5exchangeiislogs.md b/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt5exchangeiislogs.md index 18f7b9500f6..baa159acad9 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt5exchangeiislogs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt5exchangeiislogs.md @@ -10,4 +10,82 @@ [Option 5] - Using Azure Monitor Agent - You can stream all IIS Logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to create custom alerts, and improve investigation. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Detailled documentation**: >**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. 
To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions) + +>This Data Connector is the **option 5** of the wiki. + +**1. Download and install the agents needed to collect logs for Microsoft Sentinel** + +Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy. +**Deploy Monitor Agents** + + This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers +**Deploy the Azure Arc Agent** +> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc) + +**1. [Option 5] IIS logs of Exchange Servers** + +Select how to stream IIS logs of Exchange Servers +**Enable data collection rule** + + > IIS logs are collected only from **Windows** agents. + > 📋 **Additional Configuration Step**: This connector includes a configuration step of type `AdminAuditEvents`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. +**Option 1 - Azure Resource Manager (ARM) Template (Preferred Method)** + + Use this method for automated deployment of the DCE and DCR. +**A. Create DCE (If not already created for Exchange Servers)** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. You can change the proposed name of the DCE. +5. Click **Create** to deploy. + + **B. Deploy Data Connection Rule** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. 
Enter the **Workspace ID** 'and/or Other required fields'. +>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Automation** + + Use the following step-by-step instructions to deploy manually a Data Collection Rule. +**A. Create DCE (If not already created for Exchange Servers)** + + 1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints). +2. Click **+ Create** at the top. +3. In the **Basics** tab, fill the required fields and give a name to the DCE. +3. 'Make other preferable configuration changes', if needed, then click **Create**. + + **B. Create DCR, Type IIS log** + + 1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules). +2. Click **+ Create** at the top. +3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. +4. In the **Resources** tab, enter you Exchange Servers. +5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source' +6. 'Make other preferable configuration changes', if needed, then click **Create**. 
+ + **Assign the DCR to all Exchange Servers** + + Add all your Exchange Servers to the DCR + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt6exchangemessagetrackinglogs.md b/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt6exchangemessagetrackinglogs.md index 979c3f72fc7..8feef0f19f9 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt6exchangemessagetrackinglogs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt6exchangemessagetrackinglogs.md @@ -10,4 +10,245 @@ [Option 6] - Using Azure Monitor Agent - You can stream all Exchange Message Tracking from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. Those logs can be used to track the flow of messages in your Exchange environment. This data connector is based on the option 6 of the [Microsoft Exchange Security wiki](https://aka.ms/ESI_DataConnectorOptions). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Log Analytics will be deprecated**: Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc) +- **Detailled documentation**: >**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions) + +>This Data Connector is the **option 6** of the wiki. + +**1. Download and install the agents needed to collect logs for Microsoft Sentinel** + +Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy. +**Deploy Monitor Agents** + + This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers +**Deploy the Azure Arc Agent** +> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc) + +**2. 
Message Tracking of Exchange Servers** + +Select how to stream Message Tracking of Exchange Servers +**Data Collection Rules - When Azure Monitor Agent is used** + + **Enable data collection rule** +> Message Tracking are collected only from **Windows** agents. +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the DCE and DCR. +**A. Create DCE (If not already created for Exchange Servers)** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. You can change the proposed name of the DCE. +5. Click **Create** to deploy. + + **B. Deploy Data Connection Rule and Custom Table** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID** 'and/or Other required fields'. +>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Automation** + + Use the following step-by-step instructions to deploy manually a Data Collection Rule. +**Create Custom Table - Explanation** + + The Custom Table can't be created using the Azure Portal. You need to use an ARM template, a PowerShell Script or another method [described here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/create-custom-table?tabs=azure-powershell-1%2Cazure-portal-2%2Cazure-portal-3#create-a-custom-table). + + **Create Custom Table using an ARM Template** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-MessageTrackingCustomTable) +2. 
Select the preferred **Subscription**, **Resource Group**, **Location** and **Analytic Workspace Name**. +3. Click **Create** to deploy. + + **Create Custom Table using PowerShell in Cloud Shell** + + 1. From the Azure Portal, open a Cloud Shell. +2. Copy and paste and Execute the following script in the Cloud Shell to create the table. + $tableParams = @' + { + "properties": { + "schema": { + "name": "MessageTrackingLog_CL", + "columns": [ + { + "name": "directionality", + "type": "string" + }, + { + "name": "reference", + "type": "string" + }, + { + "name": "source", + "type": "string" + }, + { + "name": "TimeGenerated", + "type": "datetime" + }, + { + "name": "clientHostname", + "type": "string" + }, + { + "name": "clientIP", + "type": "string" + }, + { + "name": "connectorId", + "type": "string" + }, + { + "name": "customData", + "type": "string" + }, + { + "name": "eventId", + "type": "string" + }, + { + "name": "internalMessageId", + "type": "string" + }, + { + "name": "logId", + "type": "string" + }, + { + "name": "messageId", + "type": "string" + }, + { + "name": "messageInfo", + "type": "string" + }, + { + "name": "messageSubject", + "type": "string" + }, + { + "name": "networkMessageId", + "type": "string" + }, + { + "name": "originalClientIp", + "type": "string" + }, + { + "name": "originalServerIp", + "type": "string" + }, + { + "name": "recipientAddress", + "type": "string" + }, + { + "name": "recipientCount", + "type": "string" + }, + { + "name": "recipientStatus", + "type": "string" + }, + { + "name": "relatedRecipientAddress", + "type": "string" + }, + { + "name": "returnPath", + "type": "string" + }, + { + "name": "senderAddress", + "type": "string" + }, + { + "name": "senderHostname", + "type": "string" + }, + { + "name": "serverIp", + "type": "string" + }, + { + "name": "sourceContext", + "type": "string" + }, + { + "name": "schemaVersion", + "type": "string" + }, + { + "name": "messageTrackingTenantId", + "type": "string" + }, + { + "name": 
"totalBytes", + "type": "string" + }, + { + "name": "transportTrafficType", + "type": "string" + }, + { + "name": "FilePath", + "type": "string" + } + ] + } + } + } + '@ +3. Copy, Replace, Paste and execute the following parameters with your own values: + $SubscriptionID = 'YourGUID' + $ResourceGroupName = 'YourResourceGroupName' + $WorkspaceName = 'YourWorkspaceName' +4. Execute the Following Cmdlet to create the table: + Invoke-AzRestMethod -Path "/subscriptions/$SubscriptionID/resourcegroups/$ResourceGroupName/providers/microsoft.operationalinsights/workspaces/$WorkspaceName/tables/MessageTrackingLog_CL?api-version=2021-12-01-preview" -Method PUT -payload $tableParams +**A. Create DCE (If not already created for Exchange Servers)** + + 1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints). +2. Click **+ Create** at the top. +3. In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. +3. 'Make other preferable configuration changes', if needed, then click **Create**. + + **B. Create a DCR, Type Custom log** + + 1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules). +2. Click on 'Create' button. +3. On 'Basics' tab, fill the Rule name like **DCR-Option6-MessageTrackingLogs**, select the 'Data Collection Endpoint' with the previously created endpoint and fill other parameters. +4. In the **Resources** tab, add your Exchange Servers. +5. In **Collect and Deliver**, add a Data Source type 'Custom Text logs' and enter 'C:\Program Files\Microsoft\Exchange Server\V15\TransportRoles\Logs\MessageTracking\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name. 
+6.in Transform field, enter the following KQL request : + source | extend d = split(RawData,',') | extend TimeGenerated =todatetime(d[0]) ,clientIP =tostring(d[1]) ,clientHostname =tostring(d[2]) ,serverIp=tostring(d[3]) ,senderHostname=tostring(d[4]) ,sourceContext=tostring(d[5]) ,connectorId =tostring(d[6]) ,source=tostring(d[7]) ,eventId =tostring(d[8]) ,internalMessageId =tostring(d[9]) ,messageId =tostring(d[10]) ,networkMessageId =tostring(d[11]) ,recipientAddress=tostring(d[12]) ,recipientStatus=tostring(d[13]) ,totalBytes=tostring(d[14]) ,recipientCount=tostring(d[15]) ,relatedRecipientAddress=tostring(d[16]) ,reference=tostring(d[17]) ,messageSubject =tostring(d[18]) ,senderAddress=tostring(d[19]) ,returnPath=tostring(d[20]) ,messageInfo =tostring(d[21]) ,directionality=tostring(d[22]) ,messageTrackingTenantId =tostring(d[23]) ,originalClientIp =tostring(d[24]) ,originalServerIp =tostring(d[25]) ,customData=tostring(d[26]) ,transportTrafficType =tostring(d[27]) ,logId =tostring(d[28]) ,schemaVersion=tostring(d[29]) | project-away d,RawData + and click on 'Destination'. +6. In 'Destination', add a destination and select the Workspace where you have previously created the Custom Table +7. Click on 'Add data source'. +8. 
Fill other required parameters and tags and create the DCR + + **Assign the DCR to all Exchange Servers** + + Add all your Exchange Servers to the DCR + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt7exchangehttpproxylogs.md b/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt7exchangehttpproxylogs.md index dcf3376d331..73b3c2fafc0 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt7exchangehttpproxylogs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/esi-opt7exchangehttpproxylogs.md @@ -10,4 +10,423 @@ [Option 7] - Using Azure Monitor Agent - You can stream HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you create custom alerts, and improve investigation. [Learn more](https://aka.ms/ESI_DataConnectorOptions) +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Log Analytics will be deprecated**: Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc) +- **Detailed documentation**: >**NOTE:** Detailed documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This solution is based on options. This allows you to choose which data will be ingested as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each option is independent from the others. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions) + +>This Data Connector is the **option 7** of the wiki. + +**1. Download and install the agents needed to collect logs for Microsoft Sentinel** + +Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy. +**Deploy Monitor Agents** + + This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers +**Deploy the Azure Arc Agent** +> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc) + +**2. 
[Option 7] HTTP Proxy of Exchange Servers** + +Select how to stream HTTP Proxy of Exchange Servers +**Data Collection Rules - When Azure Monitor Agent is used** + + **Enable data collection rule** +> Message Tracking are collected only from **Windows** agents. +**Option 1 - Azure Resource Manager (ARM) Template (Preferred Method)** + + Use this method for automated deployment of the DCE and DCR. +**A. Create DCE (If not already created for Exchange Servers)** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. You can change the proposed name of the DCE. +4. Click **Create** to deploy. + + **B. Deploy Data Connection Rule** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID** 'and/or Other required fields'. +>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Automation** + + Use the following step-by-step instructions to manually deploy a Data Collection Rule. +**Create Custom Table - Explanation** + + The Custom Table can't be created using the Azure Portal. You need to use an ARM template, a PowerShell Script or another method [described here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/create-custom-table?tabs=azure-powershell-1%2Cazure-portal-2%2Cazure-portal-3#create-a-custom-table). + + **Create Custom Table using an ARM Template** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-HTTPProxyCustomTable) +2. 
Select the preferred **Subscription**, **Resource Group**, **Location** and **Analytic Workspace Name**. +3. Click **Create** to deploy. + + **Create Custom Table using PowerShell in Cloud Shell** + + 1. From the Azure Portal, open a Cloud Shell. +2. Copy and paste and Execute the following script in the Cloud Shell to create the table. + $tableParams = @' + { + "properties": { + "schema": { + "name": "ExchangeHttpProxy_CL", + "columns": [ + { + "name": "AccountForestLatencyBreakup", + "type": "string" + }, + { + "name": "ActivityContextLifeTime", + "type": "string" + }, + { + "name": "ADLatency", + "type": "string" + }, + { + "name": "AnchorMailbox", + "type": "string" + }, + { + "name": "AuthenticatedUser", + "type": "string" + }, + { + "name": "AuthenticationType", + "type": "string" + }, + { + "name": "AuthModulePerfContext", + "type": "string" + }, + { + "name": "BackEndCookie", + "type": "string" + }, + { + "name": "BackEndGenericInfo", + "type": "string" + }, + { + "name": "BackendProcessingLatency", + "type": "string" + }, + { + "name": "BackendReqInitLatency", + "type": "string" + }, + { + "name": "BackendReqStreamLatency", + "type": "string" + }, + { + "name": "BackendRespInitLatency", + "type": "string" + }, + { + "name": "BackendRespStreamLatency", + "type": "string" + }, + { + "name": "BackEndStatus", + "type": "string" + }, + { + "name": "BuildVersion", + "type": "string" + }, + { + "name": "CalculateTargetBackEndLatency", + "type": "string" + }, + { + "name": "ClientIpAddress", + "type": "string" + }, + { + "name": "ClientReqStreamLatency", + "type": "string" + }, + { + "name": "ClientRequestId", + "type": "string" + }, + { + "name": "ClientRespStreamLatency", + "type": "string" + }, + { + "name": "CoreLatency", + "type": "string" + }, + { + "name": "DatabaseGuid", + "type": "string" + }, + { + "name": "EdgeTraceId", + "type": "string" + }, + { + "name": "ErrorCode", + "type": "string" + }, + { + "name": "GenericErrors", + "type": "string" + }, + { + 
"name": "GenericInfo", + "type": "string" + }, + { + "name": "GlsLatencyBreakup", + "type": "string" + }, + { + "name": "HandlerCompletionLatency", + "type": "string" + }, + { + "name": "HandlerToModuleSwitchingLatency", + "type": "string" + }, + { + "name": "HttpPipelineLatency", + "type": "string" + }, + { + "name": "HttpProxyOverhead", + "type": "string" + }, + { + "name": "HttpStatus", + "type": "string" + }, + { + "name": "IsAuthenticated", + "type": "string" + }, + { + "name": "KerberosAuthHeaderLatency", + "type": "string" + }, + { + "name": "MajorVersion", + "type": "string" + }, + { + "name": "Method", + "type": "string" + }, + { + "name": "MinorVersion", + "type": "string" + }, + { + "name": "ModuleToHandlerSwitchingLatency", + "type": "string" + }, + { + "name": "Organization", + "type": "string" + }, + { + "name": "PartitionEndpointLookupLatency", + "type": "string" + }, + { + "name": "Protocol", + "type": "string" + }, + { + "name": "ProtocolAction", + "type": "string" + }, + { + "name": "ProxyAction", + "type": "string" + }, + { + "name": "ProxyTime", + "type": "string" + }, + { + "name": "RequestBytes", + "type": "string" + }, + { + "name": "RequestHandlerLatency", + "type": "string" + }, + { + "name": "RequestId", + "type": "string" + }, + { + "name": "ResourceForestLatencyBreakup", + "type": "string" + }, + { + "name": "ResponseBytes", + "type": "string" + }, + { + "name": "RevisionVersion", + "type": "string" + }, + { + "name": "RouteRefresherLatency", + "type": "string" + }, + { + "name": "RoutingHint", + "type": "string" + }, + { + "name": "RoutingLatency", + "type": "string" + }, + { + "name": "RoutingStatus", + "type": "string" + }, + { + "name": "RoutingType", + "type": "string" + }, + { + "name": "ServerHostName", + "type": "string" + }, + { + "name": "ServerLocatorHost", + "type": "string" + }, + { + "name": "ServerLocatorLatency", + "type": "string" + }, + { + "name": "SharedCacheLatencyBreakup", + "type": "string" + }, + { + "name": 
"TargetOutstandingRequests", + "type": "string" + }, + { + "name": "TargetServer", + "type": "string" + }, + { + "name": "TargetServerVersion", + "type": "string" + }, + { + "name": "TotalAccountForestLatency", + "type": "string" + }, + { + "name": "TotalGlsLatency", + "type": "string" + }, + { + "name": "TotalRequestTime", + "type": "string" + }, + { + "name": "TotalResourceForestLatency", + "type": "string" + }, + { + "name": "TotalSharedCacheLatency", + "type": "string" + }, + { + "name": "UrlHost", + "type": "string" + }, + { + "name": "UrlQuery", + "type": "string" + }, + { + "name": "UrlStem", + "type": "string" + }, + { + "name": "UserADObjectGuid", + "type": "string" + }, + { + "name": "UserAgent", + "type": "string" + }, + { + "name": "TimeGenerated", + "type": "datetime" + }, + { + "name": "FilePath", + "type": "string" + } + ] + } + } + } + '@ +3. Copy, Replace, Paste and execute the following parameters with your own values: + $SubscriptionID = 'YourGUID' + $ResourceGroupName = 'YourResourceGroupName' + $WorkspaceName = 'YourWorkspaceName' +4. Execute the Following Cmdlet to create the table: + Invoke-AzRestMethod -Path "/subscriptions/$SubscriptionID/resourcegroups/$ResourceGroupName/providers/microsoft.operationalinsights/workspaces/$WorkspaceName/tables/ExchangeHttpProxy_CL?api-version=2021-12-01-preview" -Method PUT -payload $tableParams +**A. Create DCE (If not already created for Exchange Servers)** + + 1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints). +2. Click **+ Create** at the top. +3. In the **Basics** tab, fill the required fields and give a name to the DCE. +3. 'Make other preferable configuration changes', if needed, then click **Create**. + + **B. Create a DCR, Type Custom log** + + 1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules). +2. Click on 'Create' button. +3. On 'Basics' tab, fill the Rule name like **DCR-Option7-HTTPProxyLogs**, select the 'Data Collection Endpoint' with the previously created endpoint and fill other parameters. +4. In the **Resources** tab, add your Exchange Servers. +5. In **Collect and Deliver**, add a Data Source type 'Custom Text logs' and enter the following file pattern : + 'C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Autodiscover\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Eas\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Ecp\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Ews\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Mapi\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Oab\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Owa\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\OwaCalendar\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\PowerShell\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\RpcHttp\*.log' +6. Put 'ExchangeHttpProxy_CL' in Table Name. +7. 
in Transform field, enter the following KQL request : + source | extend d = split(RawData,',') | extend DateTime=todatetime(d[0]),RequestId=tostring(d[1]) ,MajorVersion=tostring(d[2]) ,MinorVersion=tostring(d[3]) ,BuildVersion=tostring(d[4]) ,RevisionVersion=tostring(d[5]) ,ClientRequestId=tostring(d[6]) ,Protocol=tostring(d[7]) ,UrlHost=tostring(d[8]) ,UrlStem=tostring(d[9]) ,ProtocolAction=tostring(d[10]) ,AuthenticationType=tostring(d[11]) ,IsAuthenticated=tostring(d[12]) ,AuthenticatedUser=tostring(d[13]) ,Organization=tostring(d[14]) ,AnchorMailbox=tostring(d[15]) ,UserAgent=tostring(d[16]) ,ClientIpAddress=tostring(d[17]) ,ServerHostName=tostring(d[18]) ,HttpStatus=tostring(d[19]) ,BackEndStatus=tostring(d[20]) ,ErrorCode=tostring(d[21]) ,Method=tostring(d[22]) ,ProxyAction=tostring(d[23]) ,TargetServer=tostring(d[24]) ,TargetServerVersion=tostring(d[25]) ,RoutingType=tostring(d[26]) ,RoutingHint=tostring(d[27]) ,BackEndCookie=tostring(d[28]) ,ServerLocatorHost=tostring(d[29]) ,ServerLocatorLatency=tostring(d[30]) ,RequestBytes=tostring(d[31]) ,ResponseBytes=tostring(d[32]) ,TargetOutstandingRequests=tostring(d[33]) ,AuthModulePerfContext=tostring(d[34]) ,HttpPipelineLatency=tostring(d[35]) ,CalculateTargetBackEndLatency=tostring(d[36]) ,GlsLatencyBreakup=tostring(d[37]) ,TotalGlsLatency=tostring(d[38]) ,AccountForestLatencyBreakup=tostring(d[39]) ,TotalAccountForestLatency=tostring(d[40]) ,ResourceForestLatencyBreakup=tostring(d[41]) ,TotalResourceForestLatency=tostring(d[42]) ,ADLatency=tostring(d[43]) ,SharedCacheLatencyBreakup=tostring(d[44]) ,TotalSharedCacheLatency=tostring(d[45]) ,ActivityContextLifeTime=tostring(d[46]) ,ModuleToHandlerSwitchingLatency=tostring(d[47]) ,ClientReqStreamLatency=tostring(d[48]) ,BackendReqInitLatency=tostring(d[49]) ,BackendReqStreamLatency=tostring(d[50]) ,BackendProcessingLatency=tostring(d[51]) ,BackendRespInitLatency=tostring(d[52]) ,BackendRespStreamLatency=tostring(d[53]) ,ClientRespStreamLatency=tostring(d[54]) 
,KerberosAuthHeaderLatency=tostring(d[55]) ,HandlerCompletionLatency=tostring(d[56]) ,RequestHandlerLatency=tostring(d[57]) ,HandlerToModuleSwitchingLatency=tostring(d[58]) ,ProxyTime=tostring(d[59]) ,CoreLatency=tostring(d[60]) ,RoutingLatency=tostring(d[61]) ,HttpProxyOverhead=tostring(d[62]) ,TotalRequestTime=tostring(d[63]) ,RouteRefresherLatency=tostring(d[64]) ,UrlQuery=tostring(d[65]) ,BackEndGenericInfo=tostring(d[66]) ,GenericInfo=tostring(d[67]) ,GenericErrors=tostring(d[68]) ,EdgeTraceId=tostring(d[69]) ,DatabaseGuid=tostring(d[70]) ,UserADObjectGuid=tostring(d[71]) ,PartitionEndpointLookupLatency=tostring(d[72]) ,RoutingStatus=tostring(d[73]) | extend TimeGenerated = DateTime | project-away d,RawData,DateTime + and click on 'Destination'. +8. In 'Destination', add a destination and select the Workspace where you have previously created the Custom Table +9. Click on 'Add data source'. +10. Fill other required parameters and tags and create the DCR + + **Assign the DCR to all Exchange Servers** + + Add all your Exchange Servers to the DCR + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/exabeam.md b/Tools/Solutions Analyzer/connector-docs/connectors/exabeam.md index 17b3565e5a0..4df42cf465f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/exabeam.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/exabeam.md @@ -10,4 +10,55 @@ The [Exabeam Advanced Analytics](https://www.exabeam.com/ueba/advanced-analytics-and-mitre-detect-and-stop-threats/) data connector provides the capability to ingest Exabeam Advanced Analytics events into Microsoft Sentinel. Refer to [Exabeam Advanced Analytics documentation](https://docs.exabeam.com/) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Exabeam Advanced Analytics and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Exabeam%20Advanced%20Analytics/Parsers/ExabeamEvent.txt), on the second line of the query, enter the hostname(s) of your Exabeam Advanced Analytics device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +>**NOTE:** This data connector has been developed using Exabeam Advanced Analytics i54 (Syslog) + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the server where the Exabeam Advanced Analytic logs are generated or forwarded. + +> Logs from Exabeam Advanced Analytic deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. 
+ - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +**3. Configure Exabeam event forwarding to Syslog** + +[Follow these instructions](https://docs.exabeam.com/en/advanced-analytics/i56/advanced-analytics-administration-guide/125351-advanced-analytics.html#UUID-7ce5ff9d-56aa-93f0-65de-c5255b682a08) to send Exabeam Advanced Analytics activity log data via syslog. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/extrahop.md b/Tools/Solutions Analyzer/connector-docs/connectors/extrahop.md index 79038e2d192..db7662c7c7b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/extrahop.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/extrahop.md @@ -10,4 +10,125 @@ The [ExtraHop](https://extrahop.com/) Detections Data Connector enables you to import detection data from ExtraHop RevealX to Microsoft Sentinel through webhook payloads. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **ExtraHop RevealX permissions**: The following is required on your ExtraHop RevealX system: + 1. Your RevealX system must be running firmware version 9.9.2 or later. + 2. Your RevealX system must be connected to ExtraHop Cloud Services. + 3. Your user account must have System Administration privileges on RevealX 360 or Full Write privileges on RevealX Enterprise. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the ExtraHop in which logs are pushed via ExtraHop webhook and it will ingest logs into Microsoft Sentinel. Furthermore, the connector will fetch the ingested data from the custom logs table and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias **ExtraHopDetections** and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop/Parsers/ExtraHopDetections.yaml). The function usually takes 10-15 minutes to activate after solution installation/update. + +**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the ExtraHop Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Complete the following steps for automated deployment of the ExtraHop Detections Data Connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ExtraHop-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the values for the following parameters: + + a. **Function Name** - Enter the Function Name you want. + + b. **Workspace ID** - Enter the Workspace ID of the log analytics Workspace. + + c. **Workspace Key** - Enter the Workspace Key of the log analytics Workspace. + + d. **Detections Table Name** - Enter the name of the table used to store ExtraHop detection data. + + e. **LogLevel** - Select Debug, Info, Error, or Warning for the log level or log severity value. + + f. 
**AppInsightsWorkspaceResourceID** - Enter the value of the 'Log Analytic Workspace-->Properties-->Resource ID' property. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Complete the following steps to manually deploy the ExtraHop Detections Data Connector with Azure Functions (Deployment via Visual Studio Code). + +**5. 1) Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-ExtraHop-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ExtraHopXXXXX). + + e. **Select a runtime:** Choose Python 3.11 or above. + + f. Select a location for new resources. 
For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**6. 2) Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with the following respective values (case-sensitive): + + a. **Function Name** - Enter the Function Name you want. + + b. **Workspace ID** - Enter the Workspace ID of the log analytics Workspace. + + c. **Workspace Key** - Enter the Workspace Key of the log analytics Workspace. + + d. **Detections Table Name** - Enter the name of the table used to store ExtraHop detection data. + + e. **LogLevel** - Select Debug, Info, Error, or Warning for the log level or log severity value. + + f. **logAnalyticsUri (optional)** - Configure this option to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + +**STEP 2 - Post Deployment** + +**8. 1) Get the Function App endpoint** + +1. Go to the Azure function overview page and click the **"Functions"** tab. +2. Click on the function called **"ExtraHopHttpStarter"**. +3. Go to **"GetFunctionurl"** and copy the function url available under **"default (Function key)"**. +4. Replace **{functionname}** with **"ExtraHopDetectionsOrchestrator"** in copied function url. + +**9. 
2) Configure a connection to Microsoft Sentinel and specify webhook payload criteria from RevealX** + +From your ExtraHop system, configure the Microsoft Sentinel integration to establish a connection between Microsoft Sentinel and ExtraHop RevealX and to create detection notification rules that will send webhook data to Microsoft Sentinel. For detailed instructions, refer to [Integrate ExtraHop RevealX with Microsoft Sentinel SIEM](https://docs.extrahop.com/current/integrations-microsoft-sentinel-siem/). + +*After notification rules have been configured and Microsoft Sentinel is receiving webhook data, the Function App is triggered and you can view ExtraHop detections from the Log Analytics workspace table named "ExtraHop_Detections_CL".* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/extrahopnetworks.md b/Tools/Solutions Analyzer/connector-docs/connectors/extrahopnetworks.md index 9ed3e24acc5..880d68fd51e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/extrahopnetworks.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/extrahopnetworks.md @@ -10,4 +10,64 @@ The ExtraHop Reveal(x) data connector enables you to easily connect your Reveal(x) system with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This integration gives you the ability to gain insight into your organization's network and improve your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **ExtraHop**: ExtraHop Discover or Command appliance with firmware version 7.8 or later with a user account that has Unlimited (administrator) privileges. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward ExtraHop Networks logs to Syslog agent** + +1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine IP address. +2. 
Follow the directions to install the [ExtraHop Detection SIEM Connector bundle](https://aka.ms/asi-syslog-extrahop-forwarding) on your Reveal(x) system. The SIEM Connector is required for this integration. +3. Enable the trigger for **ExtraHop Detection SIEM Connector - CEF** +4. Update the trigger with the ODS syslog targets you created  +5. The Reveal(x) system formats syslog messages in Common Event Format (CEF) and then sends data to Microsoft Sentinel. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/extrahopnetworksama.md b/Tools/Solutions Analyzer/connector-docs/connectors/extrahopnetworksama.md index 0ea01f802f5..3adfa44a6c1 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/extrahopnetworksama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/extrahopnetworksama.md @@ -10,4 +10,62 @@ The ExtraHop Reveal(x) data connector enables you to easily connect your Reveal(x) system with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This integration gives you the ability to gain insight into your organization's network and improve your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. 
Check if there is an existing DCR configured to collect the required facility of logs; if not, create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward ExtraHop Networks logs to Syslog agent** + + 1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine IP address. +2. Follow the directions to install the [ExtraHop Detection SIEM Connector bundle](https://aka.ms/asi-syslog-extrahop-forwarding) on your Reveal(x) system. The SIEM Connector is required for this integration. +3. Enable the trigger for **ExtraHop Detection SIEM Connector - CEF** +4. Update the trigger with the ODS syslog targets you created  +5. The Reveal(x) system formats syslog messages in Common Event Format (CEF) and then sends data to Microsoft Sentinel. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/f5.md b/Tools/Solutions Analyzer/connector-docs/connectors/f5.md index 21c34c2bad7..761e25691f7 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/f5.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/f5.md @@ -10,4 +10,66 @@ The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. 
+ + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Configure F5 to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. + +Go to [F5 Configuring Application Security Event Logging](https://aka.ms/asi-syslog-f5-forwarding), follow the instructions to set up remote logging, using the following guidelines: + +1. Set the **Remote storage type** to CEF. +2. Set the **Protocol setting** to UDP. +3. Set the **IP address** to the Syslog server IP address. +4. Set the **port number** to 514, or the port your agent uses. +5. Set the **facility** to the one that you configured in the Syslog agent (by default, the agent sets this to local4). +6. You can set the **Maximum Query String Size** to be the same as you configured. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. 
You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/f5ama.md b/Tools/Solutions Analyzer/connector-docs/connectors/f5ama.md index 1a44a779631..e759f5503cb 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/f5ama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/f5ama.md @@ -10,4 +10,67 @@ The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. 
Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check if there is an existing DCR configured to collect the required facility of logs; if not, create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Configure F5 to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. + +Go to [F5 Configuring Application Security Event Logging](https://aka.ms/asi-syslog-f5-forwarding), follow the instructions to set up remote logging, using the following guidelines: + +1. Set the **Remote storage type** to CEF. +2. Set the **Protocol setting** to UDP. +3. Set the **IP address** to the Syslog server IP address. +4. Set the **port number** to 514, or the port your agent uses. +5. Set the **facility** to the one that you configured in the Syslog agent (by default, the agent sets this to local4). +6. You can set the **Maximum Query String Size** to be the same as you configured. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. 
You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/f5bigip.md b/Tools/Solutions Analyzer/connector-docs/connectors/f5bigip.md index b96fb8eb5c5..b78dd7f7cdf 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/f5bigip.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/f5bigip.md @@ -10,4 +10,22 @@ The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure and connect F5 BIGIP** + +To connect your F5 BIGIP, you have to post a JSON declaration to the system’s API endpoint. 
For instructions on how to do this, see [Integrating the F5 BIGIP with Microsoft Sentinel](https://aka.ms/F5BigIp-Integrate). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/feedly.md b/Tools/Solutions Analyzer/connector-docs/connectors/feedly.md index e6d2276f527..cce28e5f78f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/feedly.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/feedly.md @@ -10,4 +10,125 @@ This connector allows you to ingest IoCs from Feedly. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App are required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Azure AD Application Registration**: An Azure AD App Registration with client credentials and permissions to write to the Data Collection Rule. The application must be granted 'Monitoring Metrics Publisher' role on the DCR. +- **Data Collection Endpoint and Rule**: A Data Collection Endpoint (DCE) and Data Collection Rule (DCR) must be created before deploying this connector. 
[See the documentation to learn more](https://learn.microsoft.com/azure/azure-monitor/logs/custom-logs-migrate). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions and the Logs Ingestion API to pull IoCs from Feedly into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +**1. Step 1 - Prepare Your Environment** + +The Feedly connector will automatically create: + +- **Custom Table**: `feedly_indicators_CL` with the required schema +- **Data Collection Endpoint (DCE)**: For ingesting data +- **Data Collection Rule (DCR)**: For processing and routing data + +No manual resource creation is required - everything will be created during deployment! + +For detailed instructions, see: [Migrate from HTTP Data Collector API to Logs Ingestion API](https://learn.microsoft.com/azure/azure-monitor/logs/custom-logs-migrate) + +**2. Step 2 - Deploy the Connector** + +The ARM template will automatically: + +1. Create a managed identity for the Azure Function +2. Assign the **Monitoring Metrics Publisher** role to the Function App on the DCR +3. Configure all necessary permissions for data ingestion + +No manual role assignments are required - everything is handled automatically during deployment! + +**3. Step 3 - Get your Feedly API token** + +Go to https://feedly.com/i/team/api and generate a new API token for the connector. + +**4. (Optional Step) Securely store credentials in Azure Key Vault** + +Azure Key Vault provides a secure mechanism to store and retrieve secrets. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App by using the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema. + +**5. Step 4 - Deploy the connector** + +Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function + +>**IMPORTANT:** Before deploying, gather the following information: +- Feedly API Token and Stream IDs + +All Azure Monitor resources (DCE, DCR, custom table, and role assignments) will be created automatically during deployment. +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Feedly connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Feedly-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the required parameters: + - **TableName**: Name for the custom table (default: `feedly_indicators_CL`) + - **FeedlyApiKey**: Your Feedly API token from Step 3 + - **FeedlyStreamIds**: Comma-separated list of Feedly stream IDs + - **DaysToBackfill**: Number of days to backfill (default: 7) + +>**Note**: If using Azure Key Vault secrets, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Feedly connector manually with Azure Functions (Deployment via Visual Studio Code). +**1. 
Deploy a Function App** + + **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/raw/refs/heads/master/Solutions/Feedly/Data%20Connectors/FeedlyAzureFunction.zip) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity Bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. FeedlyXXXX). + + e. **Select a runtime:** Choose Python 3.10. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + + **2. Configure the Function App** + + 1. In the Function App, select the Function App Name and select **Configuration**. +2. 
In the **Application settings** tab, select **New application setting**. +3. Add each of the following application settings individually, with their respective values (case-sensitive): + - `DataCollectionEndpoint`: Will be populated automatically after DCE creation + - `DcrImmutableId`: Will be populated automatically after DCR creation + - `DcrStreamName`: `feedly_indicators_CL` + - `FeedlyApiKey`: Your Feedly API token + - `FeedlyStreamIds`: Comma-separated Feedly stream IDs + - `DaysToBackfill`: Number of days to backfill (e.g., 7) + +**Note**: The Function App uses managed identity for authentication to Azure Monitor, so no Azure AD credentials are needed. + +>**Note**: Use Azure Key Vault references for sensitive values: `@Microsoft.KeyVault(SecretUri={Security Identifier})` + +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/fireeyenx.md b/Tools/Solutions Analyzer/connector-docs/connectors/fireeyenx.md index 1c7bcb27251..98512275dfd 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/fireeyenx.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/fireeyenx.md @@ -10,4 +10,79 @@ The [FireEye Network Security (NX)](https://www.fireeye.com/products/network-security.html) data connector provides the capability to ingest FireEye Network Security logs into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
 + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**FireEyeNXEvent**](https://aka.ms/sentinel-FireEyeNX-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using FEOS release v9.0 + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel. This machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Configure FireEye NX to send logs using CEF** + +Complete the following steps to send data using CEF: + +2.1. Log into the FireEye appliance with an administrator account + +2.2. Click **Settings** + +2.3. 
Click **Notifications** + +Click **rsyslog** + +2.4. Check the **Event type** check box + +2.5. Make sure Rsyslog settings are: + +- Default format: CEF + +- Default delivery: Per event + +- Default send as: Alert + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/fireeyenxama.md b/Tools/Solutions Analyzer/connector-docs/connectors/fireeyenxama.md index 49a92d2e3ee..11adca49ca0 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/fireeyenxama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/fireeyenxama.md @@ -10,4 +10,77 @@ The [FireEye Network Security (NX)](https://www.fireeye.com/products/network-security.html) data connector provides the capability to ingest FireEye Network Security logs into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**FireEyeNXEvent**](https://aka.ms/sentinel-FireEyeNX-parser) which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Configure FireEye NX to send logs using CEF** + + Complete the following steps to send data using CEF: + +2.1. Log into the FireEye appliance with an administrator account + +2.2. Click **Settings** + +2.3. Click **Notifications** + +Click **rsyslog** + +2.4. Check the **Event type** check box + +2.5. Make sure Rsyslog settings are: + +- Default format: CEF + +- Default delivery: Per event + +- Default send as: Alert + + **Step C. 
Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/flare.md b/Tools/Solutions Analyzer/connector-docs/connectors/flare.md index de63289403c..ece617dc293 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/flare.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/flare.md @@ -10,4 +10,29 @@ [Flare](https://flare.systems/platform/) connector allows you to receive data and intelligence from Flare on Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Required Flare permissions**: only Flare organization administrators may configure the Microsoft Sentinel integration. 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Creating an Alert Channel for Microsoft Sentinel** +As an organization administrator, authenticate on [Flare](https://app.flare.systems) and access the [team page](https://app.flare.systems#/team) to create a new alert channel. + + Click on 'Create a new alert channel' and select 'Microsoft Sentinel'. Enter your Shared Key And WorkspaceID. Save the Alert Channel. + For more help and details, see our [Azure configuration documentation](https://docs.microsoft.com/azure/sentinel/connect-data-sources). + - **Workspace ID**: `{0}` + - **Primary key**: `{0} ` +**2. Associating your alert channel to an alert feed** +At this point, you may configure alerts to be sent to Microsoft Sentinel the same way that you would configure regular email alerts. + + For a more detailed guide, refer to the Flare documentation. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/forcepoint-dlp.md b/Tools/Solutions Analyzer/connector-docs/connectors/forcepoint-dlp.md index 893709e05f0..1f23563b791 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/forcepoint-dlp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/forcepoint-dlp.md @@ -10,4 +10,20 @@ The Forcepoint DLP (Data Loss Prevention) connector allows you to automatically export DLP incident data from Forcepoint DLP into Microsoft Sentinel in real-time. This enriches visibility into user activities and data loss incidents, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Follow step by step instructions in the [Forcepoint DLP documentation for Microsoft Sentinel](https://frcpnt.com/dlp-sentinel) to configure this connector. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcasb.md b/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcasb.md index e4b42de2be7..d2e7672628b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcasb.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcasb.md @@ -10,4 +10,63 @@ The Forcepoint CASB (Cloud Access Security Broker) Connector allows you to automatically export CASB logs and events into Microsoft Sentinel in real-time. This enriches visibility into user activities across locations and cloud applications, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel. This machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. 
Forward Common Event Format (CEF) logs to Syslog agent** + +Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + +**5. Forcepoint integration installation guide** + +To complete the installation of this Forcepoint product integration, follow the guide linked below. + +[Installation Guide >](https://frcpnt.com/casb-sentinel) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcasbama.md b/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcasbama.md index 785c93edfd1..1ec12b6dac0 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcasbama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcasbama.md @@ -10,4 +10,67 @@ The Forcepoint CASB (Cloud Access Security Broker) Connector allows you to automatically export CASB logs and events into Microsoft Sentinel in real-time. 
This enriches visibility into user activities across locations and cloud applications, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine. 
+ + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + +**3. Forcepoint integration installation guide** + +To complete the installation of this Forcepoint product integration, follow the guide linked below. + +[Installation Guide >](https://frcpnt.com/casb-sentinel) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcsg.md b/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcsg.md index 62e55d9f04d..7799734c3da 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcsg.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcsg.md @@ -10,4 +10,59 @@ Forcepoint Cloud Security Gateway is a converged cloud security service that provides visibility, control, and threat protection for users and data, wherever they are. 
For more information visit: https://www.forcepoint.com/product/cloud-security-gateway +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +This integration requires the Linux Syslog agent to collect your Forcepoint Cloud Security Gateway Web/Email logs on port 514 TCP as Common Event Format (CEF) and forward them to Microsoft Sentinel. +- **Your Data Connector Syslog Agent Installation Command is:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` + +**2. Implementation options** + +The integration is made available with two implementation options. +**2.1 Docker Implementation** + + Leverages docker images where the integration component is already installed with all necessary dependencies. + +Follow the instructions provided in the Integration Guide linked below. + +[Integration Guide >](https://frcpnt.com/csg-sentinel) + + **2.2 Traditional Implementation** + + Requires the manual deployment of the integration component inside a clean Linux machine. + +Follow the instructions provided in the Integration Guide linked below. + +[Integration Guide >](https://frcpnt.com/csg-sentinel) +**3. 
Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF). + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcsgama.md b/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcsgama.md index aac96ecf582..7aaa9e7073a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcsgama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/forcepointcsgama.md @@ -10,4 +10,72 @@ Forcepoint Cloud Security Gateway is a converged cloud security service that provides visibility, control, and threat protection for users and data, wherever they are. For more information visit: https://www.forcepoint.com/product/cloud-security-gateway +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine. + + **Step B. Implementation options** + + The integration is made available with two implementations options. +**1. Docker Implementation** + + Leverages docker images where the integration component is already installed with all necessary dependencies. + +Follow the instructions provided in the Integration Guide linked below. + +[Integration Guide >](https://frcpnt.com/csg-sentinel) + + **2. Traditional Implementation** + + Requires the manual deployment of the integration component inside a clean Linux machine. + +Follow the instructions provided in the Integration Guide linked below. + +[Integration Guide >](https://frcpnt.com/csg-sentinel) + **Step C. 
Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF). + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/forcepointngfw.md b/Tools/Solutions Analyzer/connector-docs/connectors/forcepointngfw.md index 2fd6a85741f..743b0d4dff4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/forcepointngfw.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/forcepointngfw.md @@ -10,4 +10,63 @@ The Forcepoint NGFW (Next Generation Firewall) connector allows you to automatically export user-defined Forcepoint NGFW logs into Microsoft Sentinel in real-time. This enriches visibility into user activities recorded by NGFW, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. + +**3. 
Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + +**5. Forcepoint integration installation guide** + +To complete the installation of this Forcepoint product integration, follow the guide linked below. + +[Installation Guide >](https://frcpnt.com/ngfw-sentinel) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/forcepointngfwama.md b/Tools/Solutions Analyzer/connector-docs/connectors/forcepointngfwama.md index c70a466964d..26980030987 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/forcepointngfwama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/forcepointngfwama.md @@ -10,4 +10,67 @@ The Forcepoint NGFW (Next Generation Firewall) connector allows you to automatically export user-defined Forcepoint NGFW logs into Microsoft Sentinel in real-time. This enriches visibility into user activities recorded by NGFW, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine. + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. 
+ + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + +**3. Forcepoint integration installation guide** + +To complete the installation of this Forcepoint product integration, follow the guide linked below. + +[Installation Guide >](https://frcpnt.com/ngfw-sentinel) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/forescout-eyeinspect-for-ot-security.md b/Tools/Solutions Analyzer/connector-docs/connectors/forescout-eyeinspect-for-ot-security.md index b3554e1cc2a..349d20f7a1b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/forescout-eyeinspect-for-ot-security.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/forescout-eyeinspect-for-ot-security.md @@ -10,4 +10,22 @@ Forescout eyeInspect for OT Security connector allows you to connect Asset/Alert information from Forescout eyeInspect OT platform with Microsoft Sentinel, to view and analyze data using Log Analytics Tables and Workbooks. 
This gives you more insight into OT organization network and improves security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Forescout eyeInspect OT Microsoft Sentinel Integration** + +Instructions on how to configure Forescout eyeInspect Microsoft Sentinel Integration are provided at Forescout eyeInspect Documentation Portal +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/forescout.md b/Tools/Solutions Analyzer/connector-docs/connectors/forescout.md index c4c50ac2cf3..580155e0c78 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/forescout.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/forescout.md @@ -10,4 +10,61 @@ The [Forescout](https://www.forescout.com/) data connector provides the capability to ingest [Forescout events](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.How-to-Work-with-the-Syslog-Plugin.html) into Microsoft Sentinel. 
Refer to [Forescout documentation](https://docs.forescout.com/bundle/syslog-msg-3-6-tn/page/syslog-msg-3-6-tn.About-Syslog-Messages-in-Forescout.html) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ForescoutEvent**](https://aka.ms/sentinel-forescout-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using Forescout Syslog Plugin version: v3.6 + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server where the Forescout logs are forwarded. + +> Logs from Forescout Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. 
Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure Forescout event forwarding** + +Follow the configuration steps below to get Forescout logs into Microsoft Sentinel. +1. [Select an Appliance to Configure.](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.Select-an-Appliance-to-Configure.html) +2. [Follow these instructions](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.Send-Events-To-Tab.html#pID0E0CE0HA) to forward alerts from the Forescout platform to a syslog server. +3. [Configure](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.Syslog-Triggers.html) the settings in the Syslog Triggers tab. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/forescouthostpropertymonitor.md b/Tools/Solutions Analyzer/connector-docs/connectors/forescouthostpropertymonitor.md index a6018fa34a0..bf7efb00370 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/forescouthostpropertymonitor.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/forescouthostpropertymonitor.md @@ -10,4 +10,23 @@ The Forescout Host Property Monitor connector allows you to connect host/policy/compliance properties from Forescout platform with Microsoft Sentinel, to view, create custom incidents, and improve investigation. This gives you more insight into your organization network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Forescout Plugin requirement**: Please make sure Forescout Microsoft Sentinel plugin is running on Forescout platform + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Instructions on how to configure Forescout Microsoft Sentinel plugin are provided at Forescout Documentation Portal (https://docs.forescout.com/bundle/microsoft-sentinel-module-v2-0-0-h) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/forgerock.md b/Tools/Solutions Analyzer/connector-docs/connectors/forgerock.md index 0f9e51bdd67..9527bc39765 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/forgerock.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/forgerock.md @@ -10,4 +10,61 @@ The ForgeRock Identity Platform provides a single common auditing framework. Extract and aggregate log data across the entire platform with common audit (CAUD) event handlers and unique IDs so that it can be tracked holistically. Open and extensible, you can leverage audit logging and reporting capabilities for integration with Microsoft Sentinel via this CAUD for CEF connector. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configuration for the ForgeRock Common Audit (CAUD) for Microsoft Sentinel** + +In ForgeRock, install and configure this Common Audit (CAUD) for Microsoft Sentinel per the documentation at https://github.com/javaservlets/SentinelAuditEventHandler. Next, in Azure, follow the below CEF steps. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. 
+ - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/fortinet.md b/Tools/Solutions Analyzer/connector-docs/connectors/fortinet.md index 81cff06e4f9..63caa7181a1 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/fortinet.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/fortinet.md @@ -10,4 +10,74 @@ The Fortinet firewall connector allows you to easily connect your Fortinet logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py &&sudo python cef_installer.py {0} {1}` +**2. 
Forward Fortinet logs to Syslog agent** + +Set your Fortinet to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine’s IP address. + + +Copy the CLI commands below and: +- Replace "server <ip address>" with the Syslog agent's IP address. +- Set the "<facility_name>" to use the facility you configured in the Syslog agent (by default, the agent sets this to local4). +- Set the Syslog port to 514, the port your agent uses. +- To enable CEF format in early FortiOS versions, you may need to run the command "set csv disable". + +For more information, go to the [Fortinet Document Library](https://aka.ms/asi-syslog-fortinet-fortinetdocumentlibrary), choose your version, and use the "Handbook" and "Log Message Reference" PDFs. + +[Learn more >](https://aka.ms/CEF-Fortinet) +- **Set up the connection using the CLI to run the following commands:**: `config log syslogd setting + set status enable +set format cef +set port 514 +set server +end` + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py &&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/fortinetama.md b/Tools/Solutions Analyzer/connector-docs/connectors/fortinetama.md index eb5a63ae740..417fc54ecfc 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/fortinetama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/fortinetama.md @@ -10,4 +10,75 @@ The Fortinet firewall connector allows you to easily connect your Fortinet logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Fortinet logs to Syslog agent** + + Set your Fortinet to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine’s IP address. + + +Copy the CLI commands below and: +- Replace "server <ip address>" with the Syslog agent's IP address. +- Set the "<facility_name>" to use the facility you configured in the Syslog agent (by default, the agent sets this to local4). +- Set the Syslog port to 514, the port your agent uses. +- To enable CEF format in early FortiOS versions, you may need to run the command "set csv disable". + +For more information, go to the [Fortinet Document Library](https://aka.ms/asi-syslog-fortinet-fortinetdocumentlibrary), choose your version, and use the "Handbook" and "Log Message Reference" PDFs. + +[Learn more >](https://aka.ms/CEF-Fortinet) + - **Set up the connection using the CLI to run the following commands:**: `config log syslogd setting + set status enable +set format cef +set port 514 +set server +end` + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. 
You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/fortinetfortindrclouddataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/fortinetfortindrclouddataconnector.md index 118cb87a2f9..31d3cc02f44 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/fortinetfortindrclouddataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/fortinetfortindrclouddataconnector.md @@ -10,4 +10,48 @@ The Fortinet FortiNDR Cloud data connector provides the capability to ingest [Fortinet FortiNDR Cloud](https://docs.fortinet.com/product/fortindr-cloud) data into Microsoft Sentinel using the FortiNDR Cloud API +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **MetaStream Credentials**: **AWS Access Key Id**, **AWS Secret Access Key**, **FortiNDR Cloud Account Code** are required to retrieve event data. +- **API Credentials**: **FortiNDR Cloud API Token**, **FortiNDR Cloud Account UUID** are required to retrieve detection data. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the FortiNDR Cloud API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Fortinet%20FortiNDR%20Cloud/Parsers/Fortinet_FortiNDR_Cloud.md) to create the Kusto function alias **Fortinet_FortiNDR_Cloud**. + +**STEP 1 - Configuration steps for the Fortinet FortiNDR Cloud Logs Collection** + +The provider should provide or link to detailed steps to configure the 'PROVIDER NAME APPLICATION NAME' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel. 
+
**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Fortinet FortiNDR Cloud connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the FortiNDR Cloud API credentials (available in FortiNDR Cloud account management), readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Fortinet FortiNDR Cloud connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-FortinetFortiNDR-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location** (Make sure to use the same location as your Resource Group, and that the location supports Flex Consumption). +3. Enter the **Workspace ID**, **Workspace Key**, **AwsAccessKeyId**, **AwsSecretAccessKey**, and/or Other required fields. +4. Click **Create** to deploy. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/fortinetfortiweb.md b/Tools/Solutions Analyzer/connector-docs/connectors/fortinetfortiweb.md index 224aa13fa59..20b96a8a553 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/fortinetfortiweb.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/fortinetfortiweb.md @@ -10,4 +10,57 @@ The [fortiweb](https://www.fortinet.com/products/web-application-firewall/fortiweb) data connector provides the capability to ingest Threat Analytics and events into Microsoft Sentinel.
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. 
Forward Common Event Format (CEF) logs to Syslog agent** + +Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/fortinetfortiwebama.md b/Tools/Solutions Analyzer/connector-docs/connectors/fortinetfortiwebama.md index 9ff8a721ab7..b20a8989ada 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/fortinetfortiwebama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/fortinetfortiwebama.md @@ -10,4 +10,58 @@ The [fortiweb](https://www.fortinet.com/products/web-application-firewall/fortiweb) data connector provides the capability to ingest Threat Analytics and events into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. 
Make sure that you have Python on your machine using the following command: python --version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/garrisonultraremotelogs.md b/Tools/Solutions Analyzer/connector-docs/connectors/garrisonultraremotelogs.md index 3248fe10657..94ef436fe9f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/garrisonultraremotelogs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/garrisonultraremotelogs.md @@ -10,4 +10,29 @@ The [Garrison ULTRA](https://www.garrison.com/en/garrison-ultra-cloud-platform) Remote Logs connector allows you to ingest Garrison ULTRA Remote Logs into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Garrison ULTRA**: To use this data connector you must have an active [Garrison ULTRA](https://www.garrison.com/en/garrison-ultra-cloud-platform) license. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate.
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Deployment - Azure Resource Manager (ARM) Template** + +These steps outline the automated deployment of the Garrison ULTRA Remote Logs data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Frefs%2Fheads%2Fmaster%2FSolutions%2FGarrison%2520ULTRA%2FData%2520Connectors%2FGarrisonULTRARemoteLogs%2Fazuredeploy_DataCollectionResources.json) +2. Provide the required details such as Resource Group, Microsoft Sentinel Workspace and ingestion configurations +> **NOTE:** It is recommended to create a new Resource Group for deployment of these resources. +3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +4. Click **Purchase** to deploy. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpauditlogsdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpauditlogsdefinition.md index 3c2afdffe8d..c063614ecae 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpauditlogsdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpauditlogsdefinition.md @@ -10,4 +10,42 @@ The Google Cloud Platform (GCP) audit logs, ingested from Microsoft Sentinel's connector, enables you to capture three types of audit logs: admin activity logs, data access logs, and access transparency logs. Google cloud audit logs record a trail that practitioners can use to monitor access and detect potential threats across Google Cloud Platform (GCP) resources. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required.
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. Set up your GCP environment + You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. + Terraform provides API for the IAM that creates the resources. [Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation). + +**Government Cloud:** +#### 1. Set up your GCP environment + You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. + Terraform provides API for the IAM that creates the resources. [Link to Gov Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov). +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Connect new collectors + To enable GCP Audit Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. 
+ +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpcdnlogsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpcdnlogsccpdefinition.md index 3e841ba1cf7..1cac7889bb7 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpcdnlogsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpcdnlogsccpdefinition.md @@ -10,4 +10,55 @@ The Google Cloud Platform CDN data connector provides the capability to ingest Cloud CDN Audit logs and Cloud CDN Traffic logs into Microsoft Sentinel using the Compute Engine API. Refer the [Product overview](https://cloud.google.com/cdn/docs/overview) document for more details. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP CDN to Microsoft Sentinel** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. 
+ For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformCDN/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + + Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCDNLogsSetup) + & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformCDN/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + + Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCDNLogsSetup) + & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable CDN logs + In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes. Navigate to Cloud CDN section and click on Add origin to create backends as per link provided below. 
+ + Reference Link: [Link to documentation](https://cloud.google.com/cdn/docs/using-cdn) +#### 3. Connect new collectors + To enable GCP Cloud CDN Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpcloudidslogsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpcloudidslogsccpdefinition.md index eaac32645d0..fc595842dbe 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpcloudidslogsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpcloudidslogsccpdefinition.md @@ -10,4 +10,55 @@ The Google Cloud Platform IDS data connector provides the capability to ingest Cloud IDS Traffic logs, Threat logs and Audit logs into Microsoft Sentinel using the Google Cloud IDS API. Refer to [Cloud IDS API](https://cloud.google.com/intrusion-detection-system/docs/audit-logging#google.cloud.ids.v1.IDS) documentation for more information. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP Cloud IDS to Microsoft Sentinel** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIDS/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + + Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudIDSLogSetup) + & the Authentication set up script: [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIDS/Data%20Connectors/README.md) for log setup and authentication setup tutorial. 
+ + Find the Log set up script: [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudIDSLogSetup) + & the Authentication set up script: [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable IDS logs + In the Google Cloud Console, enable Cloud IDS API, if not enabled previously. Create an IDS Endpoint and save the changes. + +For more information on how to create and configure an IDS endpoint: [Link to documentation](https://cloud.google.com/intrusion-detection-system/docs/configuring-ids) +#### 3. Connect new collectors + To enable GCP IDS Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpcloudrunlogs-connectordefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpcloudrunlogs-connectordefinition.md index 8594737e075..4bff16ce2d3 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpcloudrunlogs-connectordefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpcloudrunlogs-connectordefinition.md @@ -10,4 +10,55 @@ The GCP Cloud Run data connector provides the capability to ingest Cloud Run request logs into Microsoft Sentinel using Pub/Sub. Refer the [Cloud Run Overview](https://cloud.google.com/run/docs/logging) for more details. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP Cloud Run to Microsoft Sentinel** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run/Data%20Connectors/README.md) for log setup and authentication setup tutorial. 
+
+    Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudRunLogsSetup)
+ & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)
+
+**Government Cloud:**
+#### 1. Setup the GCP environment
+  Ensure to have the following resources from the GCP Console:
+  Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.
+  For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run/Data%20Connectors/README.md) for log setup and authentication setup tutorial.
+
+    Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudRunLogsSetup)
+ & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)
+- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId`
+  > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+#### 2. Enable Cloud Run logs
+  In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes. Deploy or update your Cloud Run services with logging enabled.
+
+  Reference Link: [Link to documentation](https://cloud.google.com/run/docs/setup)
+#### 3. Connect new collectors
+  To enable GCP Cloud Run Request Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.
+**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpcloudsqlccfdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpcloudsqlccfdefinition.md index 69c222b0860..f61c8e4d4d1 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpcloudsqlccfdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpcloudsqlccfdefinition.md @@ -10,4 +10,50 @@ The GCP Cloud SQL data connector provides the capability to ingest Audit logs into Microsoft Sentinel using the GCP Cloud SQL API. Refer to [GCP cloud SQL Audit Logs](https://cloud.google.com/sql/docs/mysql/audit-logging) documentation for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP Cloud SQL to Microsoft Sentinel** +#### 1. 
Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL/Data%20Connectors/Readme.md) for log setup and authentication setup tutorial. + Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudSQLLogsSetup/GCPCloudSQLLogsSetup.tf) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL/Data%20Connectors/Readme.md) for log setup and authentication setup tutorial. 
+ Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudSQLLogsSetup/GCPCloudSQLLogsSetup.tf) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. In the Google Cloud Console, enable Cloud SQL API, if not enabled previously, and save the changes. +#### 3. Connect new collectors + To enable GCP Cloud SQL Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpcomputeenginelogsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpcomputeenginelogsccpdefinition.md index 7a88cca66ee..358d948c01f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpcomputeenginelogsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpcomputeenginelogsccpdefinition.md @@ -10,4 +10,51 @@ The Google Cloud Platform Compute Engine data connector provides the capability to ingest Compute Engine Audit logs into Microsoft Sentinel using the Google Cloud Compute Engine API. Refer to [Cloud Compute Engine API](https://cloud.google.com/compute/docs/reference/rest/v1) documentation for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP Compute Engine to Microsoft Sentinel** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine/Data%20Connectors/GCPComputeEngineReadme.md) for log setup and authentication setup tutorial. 
+ Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPComputeEngineLogsSetup/GCPComputeEngineLogSetup.tf) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine/Data%20Connectors/GCPComputeEngineReadme.md) for log setup and authentication setup tutorial. + Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPComputeEngineLogsSetup/GCPComputeEngineLogSetup.tf) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable Compute Engine logs + In the Google Cloud Console, enable Compute Engine API, if not enabled previously, and save the changes. +#### 3. Connect new collectors + To enable Compute Engine Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect. 
+**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpdnsdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpdnsdataconnector.md index ee09a263ff3..783b794f198 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpdnsdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpdnsdataconnector.md @@ -14,4 +14,96 @@ The Google Cloud Platform DNS data connector provides the capability to ingest [

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **GCP service account**: GCP service account with permissions to read logs (with "logging.logEntries.list" permission) is required for GCP Logging API. Also json file with service account key is required. See the documentation to learn more about [permissions](https://cloud.google.com/logging/docs/access-control#permissions_and_roles), [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**GCPCloudDNS**](https://aka.ms/sentinel-GCPDNSDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuring GCP and obtaining credentials** + +1. Make sure that Logging API is [enabled](https://cloud.google.com/apis/docs/getting-started#enabling_apis). + +2. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with Logs Viewer role (or at least with "logging.logEntries.list" permission) and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). + +3. Prepare the list of GCP resources (organizations, folders, projects) to get logs from. [Learn more about GCP resources](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy). + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. 
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GCPDNSDataConnector-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Google Cloud Platform Resource Names**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-GCPDNSDataConnector-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. 
The name you type is validated to make sure that it's unique in Azure Functions. + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_NAMES + CREDENTIALS_FILE_CONTENT + WORKSPACE_ID + SHARED_KEY + logAnalyticsUri (Optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpdnslogsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpdnslogsccpdefinition.md index 7df7820c955..6828e4379a7 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpdnslogsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpdnslogsccpdefinition.md @@ -10,4 +10,54 @@ The Google Cloud Platform DNS data connector provides the capability to ingest Cloud DNS Query logs and Cloud DNS Audit logs into Microsoft Sentinel using the Google Cloud DNS API. 
Refer to [Cloud DNS API](https://cloud.google.com/dns/docs/reference/rest/v1) documentation for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP DNS to Microsoft Sentinel** +>**NOTE:** If both Azure Function and CCP connector are running simultaneously, duplicate data is populated in the tables. +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPDNS_CCPLogsSetup) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. 
+ For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPDNS_CCPLogsSetupGov) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable DNS logs + In the Google Cloud Console, navigate to Cloud DNS Section. Enable cloud logging if not enabled previously, and save the changes. Here, you can manage the existing zones, or create a new zone and create policies for the zone which you want to monitor. + +For more information: [Link to documentation](https://cloud.google.com/dns/docs/zones/zones-overview) +#### 3. Connect new collectors + To enable GCP DNS Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. 
+ +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpfirewalllogsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpfirewalllogsccpdefinition.md index 1d05fadaf9e..c32ff6691b4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpfirewalllogsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpfirewalllogsccpdefinition.md @@ -10,4 +10,48 @@ The Google Cloud Platform (GCP) firewall logs, enable you to capture network inbound and outbound activity to monitor access and detect potential threats across Google Cloud Platform (GCP) resources. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. Set up your GCP environment + You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. + Terraform provides API for the IAM that creates the resources. 
[Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation)
+    Connector tutorial: [Link to tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup).
+
+**Government Cloud:**
+#### 1. Set up your GCP environment
+    You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription.
+    Terraform provides API for the IAM that creates the resources. [Link to Gov Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov)
+    Connector tutorial: [Link to tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup).
+- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId`
+  > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+#### 2. Enable Firewall logs
+In your GCP account, navigate to the Firewall section. Here, you can either create a new rule or edit an existing one that you want to monitor. Once you open the rule, switch the toggle button under the **Logs** section to **On**, and save the changes.
+
+For more information: [Link to documentation](https://cloud.google.com/firewall/docs/using-firewall-rules-logging?_gl=1*1no0nhk*_ga*NDMxNDIxODI3LjE3MjUyNjUzMzc.*_ga_WH2QY8WWF5*MTcyNTUyNzc4MS4xMS4xLjE3MjU1MjgxNTIuNDYuMC4w)
+#### 3. Connect new collectors
+    To enable GCP Firewall Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.
+**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpfloadbalancerlogsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpfloadbalancerlogsccpdefinition.md index ee2b8783824..4afc924f96c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpfloadbalancerlogsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpfloadbalancerlogsccpdefinition.md @@ -10,4 +10,46 @@ Google Cloud Platform (GCP) Load Balancer logs provide detailed insights into network traffic, capturing both inbound and outbound activities. These logs are used for monitoring access patterns and identifying potential security threats across GCP resources. Additionally, these logs also include GCP Web Application Firewall (WAF) logs, enhancing the ability to detect and mitigate risks effectively. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal.
+
+#### 1. Set up your GCP environment
+ You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription.
+ Terraform provides API for the IAM that creates the resources. [Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation).
+
+**Government Cloud:**
+#### 1. Set up your GCP environment
+ You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription.
+ Terraform provides API for the IAM that creates the resources. [Link to Gov Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov).
+- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId`
+ > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+#### 2. Enable Load Balancer logs
+In your GCP account, navigate to the Load Balancer section. In here you can navigate to [**Backend Service**] -> [**Edit**], once you are in the [**Backend Service**] on the [**Logging**] section **enable** the checkbox of [**Enable Logs**]. Once you open the rule, switch the toggle button under the **Logs** section to **On**, and save the changes.
+
+For more information: [Link to documentation](https://cloud.google.com/load-balancing/docs/https/https-logging-monitoring)
+#### 3. 
Connect new collectors + To enable GCP Load Balancer Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpiamccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpiamccpdefinition.md index 934351a8ecf..0b675ffba28 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpiamccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpiamccpdefinition.md @@ -10,4 +10,54 @@ The Google Cloud Platform IAM data connector provides the capability to ingest the Audit logs relating to Identity and Access Management (IAM) activities within Google Cloud into Microsoft Sentinel using the Google IAM API. Refer to [GCP IAM API](https://cloud.google.com/iam/docs/reference/rest) documentation for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP IAM to Microsoft Sentinel** +>**NOTE:** If both Azure Function and CCF connector are running parallelly, duplicate data is populated in the tables. +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPIAMCCPLogsSetup) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM/Data%20Connectors/README.md) for log setup and authentication setup tutorial. 
+ Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPIAMCCPLogsSetup) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. To enable IAM logs + In your GCP account, navigate to the IAM section. From there, you can either create a new user or modify an existing user's role that you want to monitor. Be sure to save your changes.. + +For more information: [Link to documentation](https://cloud.google.com/assured-workloads/docs/iam-roles?hl=en) +#### 3. Connect new collectors + To enable GCPIAM Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpiamdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpiamdataconnector.md index 64bbbfe9dde..a8257ad07b8 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpiamdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpiamdataconnector.md @@ -14,4 +14,98 @@ The Google Cloud Platform Identity and Access Management (IAM) data connector pr

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **GCP service account**: GCP service account with permissions to read logs is required for GCP Logging API. Also json file with service account key is required. See the documentation to learn more about [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions), [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**GCP_IAM**](https://aka.ms/sentinel-GCPIAMDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuring GCP and obtaining credentials** + +1. Make sure that Logging API is [enabled](https://cloud.google.com/apis/docs/getting-started#enabling_apis). + +2. (Optional) [Enable Data Access Audit logs](https://cloud.google.com/logging/docs/audit/configure-data-access#config-console-enable). + +3. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions) and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). + +4. Prepare the list of GCP resources (organizations, folders, projects) to get logs from. [Learn more about GCP resources](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy). + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. 
Option 1 - Azure Resource Manager (ARM) Template**
+
+Use this method for automated deployment of the data connector using an ARM Template.
+
+1. Click the **Deploy to Azure** button below.
+
+ [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GCPIAMDataConnector-azuredeploy)
+2. Select the preferred **Subscription**, **Resource Group** and **Location**.
+3. Enter the **Google Cloud Platform Resource Names**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**
+4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.
+5. Click **Purchase** to deploy.
+
+**4. Option 2 - Manual Deployment of Azure Functions**
+
+Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).
+
+**1. Deploy a Function App**
+
+> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.
+
+1. Download the [Azure Function App](https://aka.ms/sentinel-GCPIAMDataConnector-functionapp) file. Extract archive to your local development computer.
+2. Start VS Code. Choose File in the main menu and select Open Folder.
+3. Select the top level folder from extracted files.
+4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.
+If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**
+If you're already signed in, go to the next step.
+5. Provide the following information at the prompts:
+
+ a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.
+
+ b. **Select Subscription:** Choose the subscription to use.
+
+ c. 
Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_NAMES + CREDENTIALS_FILE_CONTENT + WORKSPACE_ID + SHARED_KEY + logAnalyticsUri (Optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpmonitorccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpmonitorccpdefinition.md index 00332f87311..86859587869 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpmonitorccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpmonitorccpdefinition.md @@ -10,4 +10,44 @@ The Google Cloud Platform Cloud Monitoring data connector ingests Monitoring logs from Google Cloud into Microsoft Sentinel using the Google Cloud Monitoring API. Refer to [Cloud Monitoring API](https://cloud.google.com/monitoring/api/v3) documentation for more details. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Google Cloud Platform Cloud Monitoring to Microsoft Sentinel** +#### 1. Setup GCP Monitoring Integration + To fetch logs from GCP Cloud Monitoring to Sentinel **Project ID** of Google cloud is required. +#### 2. Chose the **Metric Type** + To collect logs from Google Cloud Monitoring provide the required Metric type. + +For more details, refer to [Google Cloud Metrics](https://cloud.google.com/monitoring/api/metrics_gcp). +#### 3. OAuth Credentials + To Fetch Oauth client id and client secret refer to this [documentation](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring/Data%20Connectors/Readme.md). +#### 4. Connect to Sentinel + Click on **Connect** to start pulling monitoring logs from Google Cloud into Microsoft Sentinel. 
+- **GCP Project ID** +- **Metric Type** +- **OAuth Configuration**: + - Client ID + - Client Secret + - Click 'Connect' to authenticate +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Metric Type** +- **Project ID** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpmonitordataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpmonitordataconnector.md index 86d33642695..db867cda8df 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpmonitordataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpmonitordataconnector.md @@ -14,4 +14,97 @@ The Google Cloud Platform Cloud Monitoring data connector provides the capabilit

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **GCP service account**: GCP service account with permissions to read Cloud Monitoring metrics is required for GCP Monitoring API (required *Monitoring Viewer* role). Also json file with service account key is required. See the documentation to learn more about [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**GCP_MONITORING**](https://aka.ms/sentinel-GCPMonitorDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuring GCP and obtaining credentials** + +1. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with Monitoring Viewer role and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). + +2. Prepare the list of GCP projects to get metrics from. [Learn more about GCP projects](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy). + +3. Prepare the list of [GCP metric types](https://cloud.google.com/monitoring/api/metrics_gcp) + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GCPMonitorDataConnector-azuredeploy) +2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Google Cloud Platform Project Id List**, **Google Cloud Platform Metric Types List**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-GCPMonitorDataConnector-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. + + e. **Select a runtime:** Choose Python 3.11. + + f. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + GCP_PROJECT_ID + GCP_METRICS + GCP_CREDENTIALS_FILE_CONTENT + WORKSPACE_ID + SHARED_KEY + logAnalyticsUri (Optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpnatlogsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpnatlogsccpdefinition.md index bd5e74922ae..cbb8196746a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpnatlogsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpnatlogsccpdefinition.md @@ -10,4 +10,55 @@ The Google Cloud Platform NAT data connector provides the capability to ingest Cloud NAT Audit logs and Cloud NAT Traffic logs into Microsoft Sentinel using the Compute Engine API. Refer the [Product overview](https://cloud.google.com/nat/docs/overview) document for more details. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP NAT to Microsoft Sentinel** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + + Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudNATLogsSetup/GCPCloudNATLogsSetup.tf) + & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/README.md) for log setup and authentication setup tutorial. 
+ + Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudNATLogsSetup/GCPCloudNATLogsSetup.tf) + & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable NAT logs + In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes. Navigate to Cloud NAT section and click on Add origin to create backends as per link provided below. + + Reference Link: [Link to documentation](https://cloud.google.com/nat/docs/monitoring) +#### 3. Connect new collectors + To enable GCP Cloud NAT Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpresourcemanagerlogsccfdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpresourcemanagerlogsccfdefinition.md index ddb60bab938..7438080d503 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpresourcemanagerlogsccfdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpresourcemanagerlogsccfdefinition.md @@ -10,4 +10,53 @@ The Google Cloud Platform Resource Manager data connector provides the capability to ingest Resource Manager [Admin Activity and Data Access Audit logs](https://cloud.google.com/resource-manager/docs/audit-logging) into Microsoft Sentinel using the Cloud Resource Manager API. Refer the [Product overview](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy) document for more details. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP Resource Manager to Microsoft Sentinel** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleCloudPlatformResourceManager/Data%20Connectors/README.md) for log setup and authentication setup tutorial. 
+ + Find the Log set up script [**here**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPResourceManagerLogsSetup/GCPResourceManagerLogSetup.tf) + & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleCloudPlatformResourceManager/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + + Find the Log set up script [**here**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/c1cb589dad1add228f78e629073a9b069ce52991/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPResourceManagerLogsSetup/GCPResourceManagerLogSetup.tf) + & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable Resource Manager logs + In the Google Cloud Console, enable cloud resource manager API if not enabled previously, and save the changes. Make sure to have organization level IAM permissions for your account to see all logs in the resource hierarchy. 
You can refer the document links for different IAM permissions for access control with IAM at each level provided in this [link](https://cloud.google.com/resource-manager/docs/how-to) +#### 3. Connect new collectors + To enable GCP Resource Manager Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gcpvpcflowlogsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/gcpvpcflowlogsccpdefinition.md index 91454c51543..988ff5e0436 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gcpvpcflowlogsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gcpvpcflowlogsccpdefinition.md @@ -10,4 +10,50 @@ The Google Cloud Platform (GCP) VPC Flow Logs enable you to capture network traffic activity at the VPC level, allowing you to monitor access patterns, analyze network performance, and detect potential threats across GCP resources. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. Set up your GCP environment + You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription. + To configure this data connector, execute the following Terraform scripts: + 1. Setup Required Resources: [Configuration Guide](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPVPCFlowLogsSetup/readme.md) + 2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool. + +**Government Cloud:** +#### 1. Set up your GCP environment + You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription. + To configure this data connector, execute the following Terraform scripts: + 1. Setup Required Resources: [Configuration Guide](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPVPCFlowLogsSetup/readme.md) + 2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). 
Note: If Authentication is already setup using another GCP data connector , kindly skip this step and use the existing service account and workload identity pool. +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable VPC Flow Logs +In your GCP account, navigate to the VPC network section. Select the subnet you want to monitor and enable Flow Logs under the Logging section. + +For more information: [Google Cloud Documentation](https://cloud.google.com/vpc/docs/using-flow-logs) +#### 3. Connect new collectors + To enable GCP VPC Flow Logs for Microsoft Sentinel, click the Add new collector button, fill in the required information in the context pane, and click Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gigamondataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/gigamondataconnector.md index 3fe482e7cfe..f82fd86cd0b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gigamondataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gigamondataconnector.md @@ -10,4 +10,24 @@ Use this data connector to integrate with Gigamon Application Metadata Exporter (AMX) and get data sent directly to Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Gigamon Data Connector** + +1. Application Metadata Exporter (AMX) application converts the output from the Application Metadata Intelligence (AMI) in CEF format into JSON format and sends it to the cloud tools and Kafka. + 2. The AMX application can be deployed only on a V Series Node and can be connected to Application Metadata Intelligence running on a physical node or a virtual machine. + 3. The AMX application and the AMI are managed by GigaVUE-FM. This application is supported on VMware ESXi, VMware NSX-T, AWS and Azure. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/githubauditdefinitionv2.md b/Tools/Solutions Analyzer/connector-docs/connectors/githubauditdefinitionv2.md index 3dc0a2f6722..da0cff7810c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/githubauditdefinitionv2.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/githubauditdefinitionv2.md @@ -14,4 +14,47 @@ The GitHub audit log connector provides the capability to ingest GitHub logs int **Note:** If you intended to ingest GitHub subscribed events into Microsoft Sentinel, please refer to GitHub (using Webhooks) Connector from "**Data Connectors**" gallery. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **GitHub API personal access token**: To enable polling for the Enterprise audit log, ensure the authenticated user is an Enterprise admin and has a GitHub personal access token (classic) with the `read:audit_log` scope. +- **GitHub Enterprise type**: This connector will only function with GitHub Enterprise Cloud; it will not support GitHub Enterprise Server. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect the GitHub Enterprise-level Audit Log to Microsoft Sentinel** + +Enable GitHub audit logs. 
+ Follow [this guide](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic) to create or find your personal access token. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Github Enterprise API URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add Enterprise** + +When you click the "Add Enterprise" button in the portal, a configuration form will open. You'll need to provide: + +Enter your Github Enterprise API URL and API key. Github Enterprise API URL formats: +* `https://api.github.com/enterprises/{enterprise}` +* `https://api.{subdomain}.ghe.com/enterprises/{enterprise}` + +- **Github Enterprise API URL** (optional): Your Github Enterprise API URL +- **API Key** (optional): Enter API Key + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/githubecauditlogpolling.md b/Tools/Solutions Analyzer/connector-docs/connectors/githubecauditlogpolling.md index 2f5950135a4..7672b449673 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/githubecauditlogpolling.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/githubecauditlogpolling.md @@ -18,4 +18,23 @@ The GitHub audit log connector provides the capability to ingest GitHub logs int

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **GitHub API personal access token**: You need a GitHub personal access token to enable polling for the organization audit log. You may use either a classic token with 'read:org' scope OR a fine-grained token with 'Administration: Read-only' scope. +- **GitHub Enterprise type**: This connector will only function with GitHub Enterprise Cloud; it will not support GitHub Enterprise Server. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect the GitHub Enterprise Organization-level Audit Log to Microsoft Sentinel** + +Enable GitHub audit logs. + Follow [this guide](https://docs.github.com/en/github/authenticating-to-github/keeping-your-account-and-data-secure/creating-a-personal-access-token) to create or find your personal access token. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/githubwebhook.md b/Tools/Solutions Analyzer/connector-docs/connectors/githubwebhook.md index 8351ccd3838..56322cf253c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/githubwebhook.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/githubwebhook.md @@ -14,4 +14,81 @@ The [GitHub](https://www.github.com) webhook data connector provides the capabil **Note:** If you are intended to ingest Github Audit logs, Please refer to GitHub Enterprise Audit Log Connector from "**Data Connectors**" gallery. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector has been built on http trigger based Azure Function. And it provides an endpoint to which github will be connected through it's webhook capability and posts the subscribed events into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Github Webhook connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the GitHub data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GitHubwebhookAPI-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region and deploy. +3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +4. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the GitHub webhook data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the [Azure Function App](https://aka.ms/sentinel-GitHubWebhookAPI-functionapp) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **New application setting**. +4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + +**Post Deployment steps** + +**STEP 1 - To get the Azure Function url** + + 1. Go to Azure function Overview page and Click on "Functions" in the left blade. + 2. Click on the function called "GithubwebhookConnector". + 3. Go to "GetFunctionurl" and copy the function url. + + **STEP 2 - Configure Webhook to Github Organization** + + 1. Go to [GitHub](https://www.github.com) and open your account and click on "Your Organizations." + 2. Click on Settings. + 3. Click on "Webhooks" and enter the function app url which was copied from above STEP 1 under payload URL textbox. + 4. Choose content type as "application/json". + 5. Subscribe for events and Click on "Add Webhook" + +*Now we are done with the github Webhook configuration. 
Once the github events triggered and after the delay of 20 to 30 mins (As there will be a delay for LogAnalytics to spin up the resources for the first time), you should be able to see all the transactional events from the Github into LogAnalytics workspace table called "githubscanaudit_CL".* + + For more details, Click [here](https://aka.ms/sentinel-gitHubwebhooksteps) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gitlab.md b/Tools/Solutions Analyzer/connector-docs/connectors/gitlab.md index de2b96bf29d..3ac4bb76ce2 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gitlab.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gitlab.md @@ -10,4 +10,43 @@ The [GitLab](https://about.gitlab.com/solutions/devops-platform/) connector allows you to easily connect your GitLab (GitLab Enterprise Edition - Standalone) logs with Microsoft Sentinel. This gives you more security insight into your organization's DevOps pipelines. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configuration** + +>This data connector depends on three parsers based on a Kusto Function to work as expected [**GitLab Access Logs**](https://aka.ms/sentinel-GitLabAccess-parser), [**GitLab Audit Logs**](https://aka.ms/sentinel-GitLabAudit-parser) and [**GitLab Application Logs**](https://aka.ms/sentinel-GitLabApp-parser) which are deployed with the Microsoft Sentinel Solution. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. 
+ +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. +- **Open Syslog settings** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/gkeccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/gkeccpdefinition.md index 06d9c1b85cb..51b6a80f75f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/gkeccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/gkeccpdefinition.md @@ -10,4 +10,43 @@ The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. 
Set up your GCP environment +You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription. + +To configure this data connector, execute the following Terraform scripts: + +1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md) +2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool. +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable Kubernetes Engine Logging +In your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest—such as API server, scheduler, controller manager, HPA decision, and application logs—are enabled for effective monitoring and security analysis. +#### 3. Connect new collectors +To enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. 
+ +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/googleapigeexlogsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/googleapigeexlogsccpdefinition.md index 6c70a7d3e4e..a60989f4a11 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/googleapigeexlogsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/googleapigeexlogsccpdefinition.md @@ -10,4 +10,51 @@ The Google ApigeeX data connector provides the capability to ingest Audit logs into Microsoft Sentinel using the Google Apigee API. Refer to [Google Apigee API](https://cloud.google.com/apigee/docs/reference/apis/apigee/rest/?apix=true) documentation for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Google ApigeeX to Microsoft Sentinel** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. 
+ For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/ApigeeXReadme.md) for log setup and authentication setup tutorial. + Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPApigeeLogSetup) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/ApigeeXReadme.md) for log setup and authentication setup tutorial. + Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPApigeeLogSetup) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable ApigeeX logs + In the Google Cloud Console, enable Apigee API, if not enabled previously, and save the changes. +#### 3. 
Connect new collectors + To enable ApigeeX Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/googlesccdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/googlesccdefinition.md index 47ea6aa42f4..4d5eded1571 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/googlesccdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/googlesccdefinition.md @@ -10,4 +10,38 @@ The Google Cloud Platform (GCP) Security Command Center is a comprehensive security and risk management platform for Google Cloud, ingested from Sentinel's connector. It offers features such as asset inventory and discovery, vulnerability and threat detection, and risk mitigation and remediation to help you gain insight into your organization's security and data attack surface. This integration enables you to perform tasks related to findings and assets more effectively. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. Set up your GCP environment + You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. + Terraform provides API for the IAM that creates the resources. [Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation). +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Connect new collectors + To enable GCP SCC for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. 
+ +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/googleworkspaceccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/googleworkspaceccpdefinition.md index 2bbc817f2f4..d43913766a7 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/googleworkspaceccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/googleworkspaceccpdefinition.md @@ -10,4 +10,51 @@ The [Google Workspace](https://workspace.google.com/) Activities data connector provides the capability to ingest Activity Events from [Google Workspace API](https://developers.google.com/admin-sdk/reports/reference/rest/v1/activities/list) into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Google Workspace API access**: Access to the Google Workspace activities API through Oauth are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect to Google Workspace to start collecting user activity logs into Microsoft Sentinel** +#### Configuration steps for the Google Reports API + +1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com. +2. 
Using the search option (available at the top middle), Search for ***APIs & Services*** +3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project. + 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps: + 1. Provide App Name and other mandatory information. + 2. Add authorized domains with API Access Enabled. + 3. In Scopes section, add **Admin SDK API** scope. + 4. In Test Users section, make sure the domain admin account is added. + 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID + 1. Click on Create Credentials on the top and select Oauth client Id. + 2. Select Web Application from the Application Type drop down. + 3. Provide a suitable name to the Web App and add https://portal.azure.com/TokenAuthorize/ExtensionName/Microsoft_Azure_Security_Insights as the Authorized redirect URIs. + 4. Once you click Create, you will be provided with the Client ID and Client Secret. + Copy these values and use them in the configuration steps below. +Configure steps for the Google Reports API oauth access. Then, provide the required information below and click on Connect. +> +- **OAuth Configuration**: + - Client ID + - Client Secret + - Click 'Connect' to authenticate +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Name** +- **ID** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. 
You cannot configure data collectors through this static documentation. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/googleworkspacereportsapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/googleworkspacereportsapi.md index 7703bbfdf40..65cd1f25ef9 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/googleworkspacereportsapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/googleworkspacereportsapi.md @@ -14,4 +14,123 @@ The [Google Workspace](https://workspace.google.com/) data connector provides th

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **GooglePickleString** is required for REST API. [See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. 
+ +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**STEP 1 - Ensure the prerequisites to obtain the Google Pickle String** + +1. [Python 3 or above](https://www.python.org/downloads/) is installed. +2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available. +3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1). +4. A Google account in that domain with administrator privileges. + +**STEP 2 - Configuration steps for the Google Reports API** + +1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com. +2. Using the search option (available at the top middle), Search for ***APIs & Services*** +3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project. + 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps: + 1. Provide App Name and other mandatory information. + 2. Add authorized domains with API Access Enabled. + 3. In Scopes section, add **Admin SDK API** scope. + 4. In Test Users section, make sure the domain admin account is added. + 5. 
Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID + 1. Click on Create Credentials on the top and select Oauth client Id. + 2. Select Web Application from the Application Type drop down. + 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs. + 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to "**credentials.json**". + 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved. + 1. When popped up for sign-in, use the domain admin account credentials to login. +>**Note:** This script is supported only on Windows operating system. + 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. It will be needed on Function App deployment step. + +**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**6. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Google Workspace data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. 
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**7. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX). + + e. 
**Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select ** New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + GooglePickleString + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +4. (Optional) Change the default delays if required. + + > **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. + Fetch Delay - 10 minutes + Calendar Fetch Delay - 6 hours + Chat Fetch Delay - 1 day + User Accounts Fetch Delay - 3 hours + Login Fetch Delay - 6 hours + +5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +6. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/greynoise2sentinelapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/greynoise2sentinelapi.md index 7be6a081c3f..68272614aac 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/greynoise2sentinelapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/greynoise2sentinelapi.md @@ -10,4 +10,62 @@ This Data Connector installs an Azure Function app to download GreyNoise indicators once per day and inserts them into the ThreatIntelligenceIndicator table in Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permissions are required. +- **Workspace** (Workspace): read and write permissions on the workspace are required. + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **GreyNoise API Key**: Retrieve your GreyNoise API Key [here](https://viz.greynoise.io/account/api-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. You can connect GreyNoise Threat Intelligence to Microsoft Sentinel by following the below steps:** + +> The following steps create an Azure AAD application, retrieves a GreyNoise API key, and saves the values in an Azure Function App Configuration. + +**1. Retrieve your API Key from GreyNoise Visualizer.** + +Generate an API key from GreyNoise Visualizer https://docs.greynoise.io/docs/using-the-greynoise-api + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID and Client ID. 
Also, get the Log Analytics Workspace ID associated with your Microsoft Sentinel instance (it should display below).** + +Follow the instructions here to create your Azure AAD app and save your Client ID and Tenant ID: https://learn.microsoft.com/en-us/azure/sentinel/connect-threat-intelligence-upload-api#instructions + NOTE: Wait until step 5 to generate your client secret. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Follow the instructions here to add the Microsoft Sentinel Contributor Role: https://learn.microsoft.com/en-us/azure/sentinel/connect-threat-intelligence-upload-api#assign-a-role-to-the-application + +**4. Specify the AAD permissions to enable MS Graph API access to the upload-indicators API.** + +Follow this section here to add **'ThreatIndicators.ReadWrite.OwnedBy'** permission to the AAD App: https://learn.microsoft.com/en-us/azure/sentinel/connect-threat-intelligence-tip#specify-the-permissions-required-by-the-application. + Back in your AAD App, ensure you grant admin consent for the permissions you just added. + Finally, in the 'Tokens and APIs' section, generate a client secret and save it. You will need it in Step 6. + +**5. Deploy the Threat Intelligence (Preview) Solution, which includes the Threat Intelligence Upload Indicators API (Preview)** + +See Microsoft Sentinel Content Hub for this Solution, and install it in the Microsoft Sentinel instance. + +**6. Deploy the Azure Function** + +Click the Deploy to Azure button. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GreyNoise-azuredeploy) + + Fill in the appropriate values for each parameter. **Be aware** that the only valid values for the **GREYNOISE_CLASSIFICATIONS** parameter are **benign**, **malicious** and/or **unknown**, which must be comma-separated. + +**7. 
Send indicators to Sentinel** + +The function app installed in Step 6 queries the GreyNoise GNQL API once per day, and submits each indicator found in STIX 2.1 format to the [Microsoft Upload Threat Intelligence Indicators API](https://learn.microsoft.com/en-us/azure/sentinel/upload-indicators-api). + Each indicator expires in ~24 hours from creation unless found on the next day's query. In this case the TI Indicator's **Valid Until** time is extended for another 24 hours, which keeps it active in Microsoft Sentinel. + + For more information on the GreyNoise API and the GreyNoise Query Language (GNQL), [click here](https://developer.greynoise.io/docs/using-the-greynoise-api). + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/holmsecurityassets.md b/Tools/Solutions Analyzer/connector-docs/connectors/holmsecurityassets.md index 131d52fd10a..3c0d9b8ab56 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/holmsecurityassets.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/holmsecurityassets.md @@ -10,4 +10,49 @@ The connector provides the capability to poll data from Holm Security Center into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Holm Security API Token**: Holm Security API Token is required. 
[Holm Security API Token](https://support.holmsecurity.com/) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a Holm Security Assets to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Holm Security API** + + [Follow these instructions](https://support.holmsecurity.com/knowledge/how-do-i-set-up-an-api-token) to create an API authentication token. + +**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Holm Security connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Holm Security API authorization Token, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Azure Resource Manager (ARM) Template Deployment** + +**Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Holm Security connector. + +1. 
Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-holmsecurityassets-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, 'and/or Other required fields'. +>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/hvpollingidazurefunctions.md b/Tools/Solutions Analyzer/connector-docs/connectors/hvpollingidazurefunctions.md index 0f94d148198..dac0a0c049d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/hvpollingidazurefunctions.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/hvpollingidazurefunctions.md @@ -10,4 +10,94 @@ Through the API integration, you have the capability to retrieve all the issues related to your HackerView organizations via a RESTful interface. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a '' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the 'HackerView' API** + +The provider should provide or link to detailed steps to configure the 'HackerView' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the 'HackerView' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'HackerView' API authorization key(s) readily available. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the 'HackerView' connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CTM360-HV-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CTM360-HV-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **API **, 'and/or Other required fields'. +>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the 'HackerView' connector manually with Azure Functions. + +**5. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the CTM360 CBS data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development. + +1. 
Download the [Azure Function App](https://raw.githubusercontent.com/CTM360-Integrations/Azure-Sentinel/ctm360-HV-CBS-azurefunctionapp/Solutions/CTM360/Data%20Connectors/HackerView/AzureFunctionCTM360_HV.zip) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CTIXYZ). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): + CTM360AccountID + WorkspaceID + WorkspaceKey + CTM360Key + FUNCTION_NAME + logAnalyticsUri - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/hyasprotect.md b/Tools/Solutions Analyzer/connector-docs/connectors/hyasprotect.md index dbe2f82658a..62451757491 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/hyasprotect.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/hyasprotect.md @@ -10,4 +10,81 @@ HYAS Protect provide logs based on reputation values - Blocked, Malicious, Permitted, Suspicious. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **HYAS API Key** is required for making API calls. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the HYAS API to pull Logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**1. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the HYAS Protect data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-HYASProtect-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Function Name**, **Table Name**, **Workspace ID**, **Workspace Key**, **API Key**, **TimeInterval**, **FetchBlockedDomains**, **FetchMaliciousDomains**, **FetchSuspiciousDomains**, **FetchPermittedDomains** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**2. 
Option 2 - Manual Deployment of Azure Functions**
+
+Use the following step-by-step instructions to deploy the HYAS Protect Logs data connector manually with Azure Functions (Deployment via Visual Studio Code).
+
+**1. Deploy a Function App**
+
+> NOTE: You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.
+
+1. Download the [Azure Function App](https://aka.ms/sentinel-HYASProtect-functionapp) file. Extract archive to your local development computer.
+2. Start VS Code. Choose File in the main menu and select Open Folder.
+3. Select the top level folder from extracted files.
+4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.
+If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**
+If you're already signed in, go to the next step.
+5. Provide the following information at the prompts:
+
+	a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.
+
+	b. **Select Subscription:** Choose the subscription to use.
+
+	c. Select **Create new Function App in Azure** (Don't choose the Advanced option)
+
+	d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. HyasProtectLogsXXX).
+
+	e. **Select a runtime:** Choose Python 3.8.
+
+	f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.
+
+6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.
+7. Go to Azure Portal for the Function App configuration.
+
+**2. 
Configure the Function App**
+
+1. In the Function App, select the Function App Name and select **Configuration**.
+2. In the **Application settings** tab, select **+ New application setting**.
+3. Add each of the following application settings individually, with their respective string values (case-sensitive):
+	APIKey
+	Polling
+	WorkspaceID
+	WorkspaceKey
+4. Once all application settings have been entered, click **Save**.
+ [← Back to Connectors Index](../connectors-index.md)
diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/iboss.md b/Tools/Solutions Analyzer/connector-docs/connectors/iboss.md
index 681631bb479..d38d1f1cb9d 100644
--- a/Tools/Solutions Analyzer/connector-docs/connectors/iboss.md
+++ b/Tools/Solutions Analyzer/connector-docs/connectors/iboss.md
@@ -10,4 +10,64 @@
 The [iboss](https://www.iboss.com) data connector enables you to seamlessly connect your Threat Console to Microsoft Sentinel and enrich your instance with iboss URL event logs. Our logs are forwarded in Common Event Format (CEF) over Syslog and the configuration required can be completed on the iboss platform without the use of a proxy. Take advantage of our connector to garner critical data points and gain insight into security threats.
 
+## Permissions
+
+**Resource Provider Permissions:**
+- **Workspace** (Workspace): read and write permissions are required.
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).
+
+## Setup Instructions
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+**1. 
Configure a dedicated proxy Linux machine** + +If using the iboss gov environment or there is a preference to forward the logs to a dedicated proxy Linux machine, proceed with this step. In all other cases, please advance to step two. +**1.1 Linux Syslog agent configuration** + + Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace + + **1.2 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the dedicated proxy Linux machine between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.3 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version + +> 2. You must have elevated permissions (sudo) on your machine + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs** + +Set your Threat Console to send Syslog messages in CEF format to your Azure workspace. Make note of your Workspace ID and Primary Key within your Log Analytics Workspace (Select the workspace from the Log Analytics workspaces menu in the Azure portal. Then select Agents management in the Settings section). + +>1. Navigate to Reporting & Analytics inside your iboss Console + +>2. Select Log Forwarding -> Forward From Reporter + +>3. 
Select Actions -> Add Service + +>4. Toggle to Microsoft Sentinel as a Service Type and input your Workspace ID/Primary Key along with other criteria. If a dedicated proxy Linux machine has been configured, toggle to Syslog as a Service Type and configure the settings to point to your dedicated proxy Linux machine + +>5. Wait one to two minutes for the setup to complete + +>6. Select your Microsoft Sentinel Service and verify the Microsoft Sentinel Setup Status is Successful. If a dedicated proxy Linux machine has been configured, you may proceed with validating your connection + +**3. Validate connection** + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy (Only applicable if a dedicated proxy Linux machine has been configured). + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ibossama.md b/Tools/Solutions Analyzer/connector-docs/connectors/ibossama.md index 0836cffe218..71a48a312db 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ibossama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ibossama.md @@ -10,4 +10,63 @@ The [iboss](https://www.iboss.com) data connector enables you to seamlessly connect your Threat Console to Microsoft Sentinel and enrich your instance with iboss URL event logs. Our logs are forwarded in Common Event Format (CEF) over Syslog and the configuration required can be completed on the iboss platform without the use of a proxy. Take advantage of our connector to garner critical data points and gain insight into security threats. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure AMA Data Connector** + +Steps to configure the iboss AMA Data Connector +**Kindly follow the steps to configure the data connector** + +**Step A. Gather Required Configuration Details in Azure Arc** + + 1. Navigate to Azure Arc ---> Azure Arc Resources ---> Machines. + +2. Add a machine ---> Add a single server ---> Generate script. + +3. Select the resource group, this should be the same group as the Log Analytics Workspace for your Microsoft Sentinel instance you will be using + +4. Select a region and ensure it is in the same region as your Log Analytics Workspace + +5. Select Linux as Operating System + +6. Click Next + +7. Download the script and use this information for the next step when configuring your Microsoft Sentinel AMA integration iboss side. + +8. Navigate to the Log Analytics Workspace of your Microsoft Sentinel instance and find it's resource group, workspace name, and workspace id + + **Step B. Forward Common Event Format (CEF) logs** + + Set your Threat Console to send Syslog messages in CEF format to your Azure workspace. (Ensure you have the information gathered from the previous section) + +>1. Navigate to the Integrations Marketplace inside your iboss Console + +>2. Select Microsoft Sentinel AMA Log Forwarding + +>3. Select Add Integration + +4. Use the information from the script and your log analytics workspace to configure the integration. + +5. Add the integration + +>6. 
An email will be sent to your iboss alerts email to authenticate. Please do so within five minutes
+
+7. After authenticating, wait 15 to 20 minutes and ensure the Microsoft Sentinel Status of your integration is successful.
+
+ **Step C. Validate connection**
+
+ 1. Follow the instructions to validate your connectivity:
+
+2. Open Log Analytics to check if the logs are received using the CommonSecurityLog schema.
+
+3. It may take about 20 minutes until the connection streams data to your workspace.
+ [← Back to Connectors Index](../connectors-index.md)
diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/illumiocore.md b/Tools/Solutions Analyzer/connector-docs/connectors/illumiocore.md
index 1ec1e8ff840..39d0be5b579 100644
--- a/Tools/Solutions Analyzer/connector-docs/connectors/illumiocore.md
+++ b/Tools/Solutions Analyzer/connector-docs/connectors/illumiocore.md
@@ -10,4 +10,79 @@
 The [Illumio Core](https://www.illumio.com/products/) data connector provides the capability to ingest Illumio Core logs into Microsoft Sentinel.
 
+## Permissions
+
+**Resource Provider Permissions:**
+- **Workspace** (Workspace): read and write permissions are required.
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).
+
+## Setup Instructions
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias IllumioCoreEvent and load the function code or click [here](https://aka.ms/sentinel-IllumioCore-parser).The function usually takes 10-15 minutes to activate after solution installation/update and maps Illumio Core events to Microsoft Sentinel Information Model (ASIM). + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Configure Ilumio Core to send logs using CEF** + +2.1 Configure Event Format + + 1. From the PCE web console menu, choose **Settings > Event Settings** to view your current settings. + + 2. Click **Edit** to change the settings. + + 3. Set **Event Format** to CEF. + + 4. (Optional) Configure **Event Severity** and **Retention Period**. 
+ +2.2 Configure event forwarding to an external syslog server + + 1. From the PCE web console menu, choose **Settings > Event Settings**. + + 2. Click **Add**. + + 3. Click **Add Repository**. + + 4. Complete the **Add Repository** dialog. + + 5. Click **OK** to save the event forwarding configuration. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/illumiocoreama.md b/Tools/Solutions Analyzer/connector-docs/connectors/illumiocoreama.md index 2e2b0be9cf5..769de80f713 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/illumiocoreama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/illumiocoreama.md @@ -10,4 +10,79 @@ The [Illumio Core](https://www.illumio.com/products/) data connector provides the capability to ingest Illumio Core logs into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias IllumioCoreEvent and load the function code or click [here](https://aka.ms/sentinel-IllumioCore-parser).The function usually takes 10-15 minutes to activate after solution installation/update and maps Illumio Core events to Microsoft Sentinel Information Model (ASIM). +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine. + + **Step B. 
Configure Ilumio Core to send logs using CEF** + + Configure Event Format + + 1. From the PCE web console menu, choose **Settings > Event Settings** to view your current settings. + + 2. Click **Edit** to change the settings. + + 3. Set **Event Format** to CEF. + + 4. (Optional) Configure **Event Severity** and **Retention Period**. + +Configure event forwarding to an external syslog server + + 1. From the PCE web console menu, choose **Settings > Event Settings**. + + 2. Click **Add**. + + 3. Click **Add Repository**. + + 4. Complete the **Add Repository** dialog. + + 5. Click **OK** to save the event forwarding configuration. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/illumioinsightsdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/illumioinsightsdefinition.md index 532a4cb1240..9d7477b875f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/illumioinsightsdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/illumioinsightsdefinition.md @@ -10,4 +10,35 @@ Illumio Insights Connector sends workload and security graph data from Illumio Insights into the Azure Microsoft Sentinel Data Lake, providing deep context for threat detection, lateral movement analysis, and real-time investigation. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### Configuration steps for the Illumio Insights Connector + +**Prerequisites** +- Register and Login to Illumio Console with valid credentials +- Purchase Illumio Insights or Start a free Trial for Illumio Insights + +**Step 1: Register the Service Account** +1. Go to **Illumio Console → Access → Service Accounts** +2. Create a service account for the tenant +3. Once you create a service account, you will receive the client credentials +4. 
Copy the **auth_username** (Illumio Insights API Key) and the **Secret** (API Secret) + +**Step 2: Add Client Credentials to Sentinel Account** +- Add the API key and secret to Sentinel Account for tenant authentication +- These credentials will be used to authenticate calls to the Illumio SaaS API + +Please fill in the required fields below with the credentials obtained from the Illumio Console: +- **Illumio Insights Api Key**: (password field) +- **Api Secret**: (password field) +- **Illumio Tenant Id**: {IllumioTenantId - Optional} +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/illumioinsightssummaryccp.md b/Tools/Solutions Analyzer/connector-docs/connectors/illumioinsightssummaryccp.md index 44df4cabaca..fc013cc7aab 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/illumioinsightssummaryccp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/illumioinsightssummaryccp.md @@ -10,4 +10,44 @@ The Illumio Insights Summary connector Publishes AI-powered threat discovery and anomaly reports generated by the Illumio Insights Agent. Leveraging the MITRE ATT&CK framework, these reports surface high-fidelity insights into emerging threats and risky behaviors, directly into the Data Lake. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configuration** + +Configure the Illumio Insights Summary connector. 
+> This data connector may take 24 hrs for the latest report after onboarding +#### Configuration steps for the Illumio Insights Summary Connector + +**Prerequisites** +- Register and Login to Illumio Console with valid credentials +- Purchase Illumio Insights or Start a free Trial for Illumio Insights +- Enable The Illumio Insights Agent + +**Step 1: Register the Service Account** +1. Go to **Illumio Console → Access → Service Accounts** +2. Create a service account for the tenant +3. Once you create a service account, you will receive the client credentials +4. Copy the **auth_username** (Illumio Insights API Key) and the **Secret** (API Secret) + +**Step 2: Add Client Credentials to Sentinel Account** +- Add the API key and secret to Sentinel Account for tenant authentication +- These credentials will be used to authenticate calls to the Illumio SaaS API + +Please fill in the required fields below with the credentials obtained from the Illumio Console: +- **Illumio Insights Api Key**: (password field) +- **Api Secret**: (password field) +- **Illumio Tenant ID**: {IllumioTenantId - Optional} + +**2. Connect** + +Enable the Illumio Insights Summary connector. +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/illumiosaasccfdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/illumiosaasccfdefinition.md index f6c8f41da42..9309fb36a67 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/illumiosaasccfdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/illumiosaasccfdefinition.md @@ -10,4 +10,49 @@ The Illumio Saas Cloud data connector provides the capability to ingest Flow logs into Microsoft Sentinel using the Illumio Saas Log Integration through AWS S3 Bucket. 
Refer to [Illumio Saas Log Integration](https://product-docs-repo.illumio.com/Tech-Docs/CloudSecure/out/en/administer-cloudsecure/connector.html#UUID-c14edaab-9726-1f23-9c4c-bc2937be39ee_section-idm234556433515698) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Illumio Saas to Microsoft Sentinel** +>**NOTE:** This connector fetches the Illumio Saas Flow logs from AWS S3 bucket +To gather data from Illumio, you need to configure the following resources +#### 1. AWS Role ARN + To gather data from Illumio, you'll need AWS Role ARN. +#### 2. AWS SQS Queue URL + To gather data from Illumio, you'll need AWS SQS Queue URL. + + +For detailed steps to retrieve the AWS Role ARN, SQS Queue URL, and configure Illumio log forwarding to the Amazon S3 bucket, refer to the [Connector Setup Guide](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaasLogs_ccf/Readme.md). +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **AWS Role ARN** +- **AWS SQS Queue URL** +- **Table Name** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. 
+ +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add Account** + +*Add Account* + +When you click the "Add Account" button in the portal, a configuration form will open. You'll need to provide: + +- **Role ARN** (optional): Enter Role ARN +- **Flow Log Queue URL** (optional): Enter Flow log SQL Queue URL + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/illumiosaasdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/illumiosaasdataconnector.md index c83160d4329..98048c86b9f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/illumiosaasdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/illumiosaasdataconnector.md @@ -10,4 +10,68 @@ [Illumio](https://www.illumio.com/) connector provides the capability to ingest events into Microsoft Sentinel. The connector provides ability to ingest auditable and flow events from AWS S3 bucket. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **SQS and AWS S3 account credentials/permissions**: **AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](). If you are using s3 bucket provided by Illumio, contact Illumio support. At your request they will provide you with the AWS S3 bucket name, AWS SQS url and AWS credentials to access them.
+- **Illumio API key and secret**: **ILLUMIO_API_KEY**, **ILLUMIO_API_SECRET** is required for a workbook to make connection to SaaS PCE and fetch api responses.
+
+## Setup Instructions
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+>**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.
+
+>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.
+
+**1. Prerequisites**
+
+1. Ensure AWS SQS is configured for the s3 bucket from which flow and auditable event logs are going to be pulled. In case, Illumio provides bucket, please contact Illumio support for sqs url, s3 bucket name and aws credentials.
+ 2. Register AAD application - For DCR (Data collection rule) to authenticate to ingest data into log analytics, you must use Entra application. 1. 
[Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**.
+ 2. Ensure you have created a log analytics workspace.
+Please keep note of the name and region where it has been deployed.
+
+**2. Deployment**
+
+Choose one of the approaches from below options. Either use the below ARM template to deploy azure resources or deploy function app manually.
+
+**1. Azure Resource Manager (ARM) Template**
+
+Use this method for automated deployment of Azure resources using an ARM Template.
+
+1. Click the **Deploy to Azure** button below.
+
+	[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IllumioSaaS-FunctionApp)
+2. Provide the required details such as Microsoft Sentinel Workspace, AWS credentials, Azure AD Application details and ingestion configurations
+> **NOTE:** It is recommended to create a new Resource Group for deployment of function app and associated resources.
+3. Mark the checkbox labeled **I agree to the terms and conditions stated above**.
+4. Click **Purchase** to deploy.
+
+**2. Deploy additional function apps to handle scale**
+
+Use this method for automated deployment of additional function apps using an ARM Template.
+
+1. Click the **Deploy to Azure** button below.
+
+	[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IllumioSaaS-QueueTriggerFunctionApp)
+
+**3. Manual Deployment of Azure Functions**
+
+Deployment via Visual Studio Code.
+
+**1. Deploy a Function App**
+
+1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/raw/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioEventsConn.zip) file. Extract archive to your local development computer.
+2. 
Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + +**2. Configure the Function App** + +1. Follow documentation to set up all required environment variables and click **Save**. Ensure you restart the function app once settings are saved. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/illusiveattackmanagementsystem.md b/Tools/Solutions Analyzer/connector-docs/connectors/illusiveattackmanagementsystem.md index 83edfa61182..9d926d98c58 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/illusiveattackmanagementsystem.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/illusiveattackmanagementsystem.md @@ -10,4 +10,66 @@ The Illusive Platform Connector allows you to share Illusive's attack surface analysis data and incident logs with Microsoft Sentinel and view this information in dedicated dashboards that offer insight into your organization's attack surface risk (ASM Dashboard) and track unauthorized lateral movement in your organization's network (ADS Dashboard). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Illusive Common Event Format (CEF) logs to Syslog agent** + +1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. +> 2. Log onto the Illusive Console, and navigate to Settings->Reporting. +> 3. Find Syslog Servers +> 4. Supply the following information: +>> 1. Host name: Linux Syslog agent IP address or FQDN host name +>> 2. Port: 514 +>> 3. Protocol: TCP +>> 4. Audit messages: Send audit messages to server +> 5. To add the syslog server, click Add. +> 6. 
For more information about how to add a new syslog server in the Illusive platform, please find the Illusive Networks Admin Guide in here: https://support.illusivenetworks.com/hc/en-us/sections/360002292119-Documentation-by-Version + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/illusiveattackmanagementsystemama.md b/Tools/Solutions Analyzer/connector-docs/connectors/illusiveattackmanagementsystemama.md index 2d3d8981149..356318c85ce 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/illusiveattackmanagementsystemama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/illusiveattackmanagementsystemama.md @@ -10,4 +10,67 @@ The Illusive Platform Connector allows you to share Illusive's attack surface analysis data and incident logs with Microsoft Sentinel and view this information in dedicated dashboards that offer insight into your organization's attack surface risk (ASM Dashboard) and track unauthorized lateral movement in your organization's network (ADS Dashboard).
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Illusive Common Event Format (CEF) logs to Syslog agent** + + 1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. +> 2. Log onto the Illusive Console, and navigate to Settings->Reporting. +> 3. Find Syslog Servers +> 4. Supply the following information: +>> 1. Host name: Linux Syslog agent IP address or FQDN host name +>> 2. 
Port: 514 +>> 3. Protocol: TCP +>> 4. Audit messages: Send audit messages to server +> 5. To add the syslog server, click Add. +> 6. For more information about how to add a new syslog server in the Illusive platform, please find the Illusive Networks Admin Guide in here: https://support.illusivenetworks.com/hc/en-us/sections/360002292119-Documentation-by-Version + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/impervacloudwaflogsccfdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/impervacloudwaflogsccfdefinition.md index 7fbc4e4c0fe..beac0a7ad49 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/impervacloudwaflogsccfdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/impervacloudwaflogsccfdefinition.md @@ -10,4 +10,48 @@ The Imperva WAF Cloud data connector provides the capability to ingest logs into Microsoft Sentinel using the Imperva Log Integration through AWS S3 Bucket. 
Refer to [Imperva WAF Cloud Log Integration](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Imperva WAF Cloud to Microsoft Sentinel** +>**NOTE:** This connector fetches the Imperva Cloud WAF logs from AWS S3 bucket +To gather data from Imperva, you need to configure the following resources +#### 1. AWS Role ARN + To gather data from Imperva, you'll need AWS Role ARN. +#### 2. AWS SQS Queue URL + To gather data from Imperva, you'll need AWS SQS Queue URL. + + +For detailed steps to retrieve the AWS Role ARN, SQS Queue URL, and configure Imperva log forwarding to the Amazon S3 bucket, refer to the [Connector Setup Guide](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF/Data%20Connectors/Readme.md). +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **AWS Role ARN** +- **AWS SQS Queue URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. 
+ +**Add Account** + +*Add Account* + +When you click the "Add Account" button in the portal, a configuration form will open. You'll need to provide: + +- **Role ARN** (optional): Enter Role ARN +- **Queue URL** (optional): Enter SQS Queue URL + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/impervawafcloudapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/impervawafcloudapi.md index 9e40e8dee46..f73e9f66783 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/impervawafcloudapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/impervawafcloudapi.md @@ -10,4 +10,94 @@ The [Imperva Cloud WAF](https://www.imperva.com/resources/resource-library/datasheets/imperva-cloud-waf/) data connector provides the capability to integrate and ingest Web Application Firewall events into Microsoft Sentinel through the REST API. Refer to Log integration [documentation](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Download) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App are required.
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **ImpervaAPIID**, **ImpervaAPIKey**, **ImpervaLogServerURI** are required for the API. [See the documentation to learn more about Setup Log Integration process](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration). Check all [requirements and follow the instructions](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration) for obtaining credentials. Please note that this connector uses CEF log event format. [More information](https://docs.imperva.com/bundle/cloud-application-security/page/more/log-file-structure.htm#Logfilestructure) about log format. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Imperva Cloud API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App. + +>**NOTE:**This data connector depends on a parser based on a Kusto Function to work as expected [**ImpervaWAFCloud**](https://aka.ms/sentinel-impervawafcloud-parser) which is deployed with the Microsoft Sentinel Solution. 
+ +**STEP 1 - Configuration steps for the Log Integration** + + [Follow the instructions](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration) to obtain the credentials. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions** + +>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Imperva Cloud WAF data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-impervawafcloud-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **ImpervaAPIID**, **ImpervaAPIKey**, **ImpervaLogServerURI** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Imperva Cloud WAF data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. 
Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure functions development. + +1. Download the [Azure Functions App](https://aka.ms/sentinel-impervawafcloud-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ImpervaCloudXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select ** New application setting**. +3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): + ImpervaAPIID + ImpervaAPIKey + ImpervaLogServerURI + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://<CustomerId>.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/impervawafgateway.md b/Tools/Solutions Analyzer/connector-docs/connectors/impervawafgateway.md index 225a0bbf036..8bb05f5db17 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/impervawafgateway.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/impervawafgateway.md @@ -10,4 +10,64 @@ The [Imperva](https://www.imperva.com) connector will allow you to quickly connect your Imperva WAF Gateway alerts to Azure Sentinel. This provides you additional insight into your organization's WAF traffic and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1.
Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Azure Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Azure Sentinel will use as the proxy between your security solution and Azure Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Azure Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. + +**3. SecureSphere MX Configuration** + +This connector requires an Action Interface and Action Set to be created on the Imperva SecureSphere MX. [Follow the steps](https://community.imperva.com/blogs/craig-burlingame1/2020/11/13/steps-for-enabling-imperva-waf-gateway-alert) to create the requirements. +**3.1 Create the Action Interface** + + Create a new Action Interface that contains the required parameters to send WAF alerts to Azure Sentinel. + + **3.2 Create the Action Set** + + Create a new Action Set that uses the Action Interface configured. 
+ + **3.3 Apply the Action Set** + + Apply the Action Set to any Security Policies you wish to have alerts for sent to Azure Sentinel. +**4. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. +**4.1 Check for logs in the past 5 minutes using the following command. + +CommonSecurityLog | where DeviceVendor == "Imperva Inc." | where DeviceProduct == "WAF Gateway" | where TimeGenerated >= ago(5m)** +**5. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/infobloxclouddataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/infobloxclouddataconnector.md index b0518b92793..53e9b049971 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/infobloxclouddataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/infobloxclouddataconnector.md @@ -10,4 +10,90 @@ The Infoblox Cloud Data Connector allows you to easily connect your Infoblox BloxOne data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate.
Please verify all configuration steps in the Microsoft Sentinel portal. + +>**IMPORTANT:** This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC**](https://aka.ms/sentinel-InfobloxCloudDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +>**IMPORTANT:** This Microsoft Sentinel data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. 
+ - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Configure Infoblox BloxOne to send Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent** + +Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent. +1. Navigate to **Manage > Data Connector**. +2. Click the **Destination Configuration** tab at the top. +3. Click **Create > Syslog**. + - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**. + - **Description**: Optionally give it a meaningful **description**. + - **State**: Set the state to **Enabled**. + - **Format**: Set the format to **CEF**. + - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed. + - **Port**: Leave the port number at **514**. + - **Protocol**: Select desired protocol and CA certificate if applicable. + - Click **Save & Close**. +4. Click the **Traffic Flow Configuration** tab at the top. +5. Click **Create**. + - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**. + - **Description**: Optionally give it a meaningful **description**. + - **State**: Set the state to **Enabled**. + - Expand the **Service Instance** section. + - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. + - Expand the **Source Configuration** section. + - **Source**: Select **BloxOne Cloud Source**. + - Select all desired **log types** you wish to collect. Currently supported log types are: + - Threat Defense Query/Response Log + - Threat Defense Threat Feeds Hits Log + - DDI Query/Response Log + - DDI DHCP Lease Log + - Expand the **Destination Configuration** section. 
+ - Select the **Destination** you just created. + - Click **Save & Close**. +6. Allow the configuration some time to activate. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/infobloxclouddataconnectorama.md b/Tools/Solutions Analyzer/connector-docs/connectors/infobloxclouddataconnectorama.md index 8b84dd62963..d5d3d5311c3 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/infobloxclouddataconnectorama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/infobloxclouddataconnectorama.md @@ -1,13 +1,99 @@ -# [Recommended] Infoblox Cloud Data Connector via AMA +# [Deprecated] Infoblox Cloud Data Connector via AMA | | | |----------|-------| | **Connector ID** | `InfobloxCloudDataConnectorAma` | | **Publisher** | Infoblox | | **Tables Ingested** | [`CommonSecurityLog`](../tables-index.md#commonsecuritylog) | -| **Used in Solutions** | [Infoblox](../solutions/infoblox.md), [Infoblox Cloud Data Connector](../solutions/infoblox-cloud-data-connector.md) | -| **Connector 
Definition Files** | [template_InfobloxCloudDataConnectorAma.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCEFDataConnector/template_InfobloxCloudDataConnectorAma.JSON) | +| **Used in Solutions** | [Infoblox Cloud Data Connector](../solutions/infoblox-cloud-data-connector.md) | +| **Connector Definition Files** | [template_InfobloxCloudDataConnectorAMA.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector/Data%20Connectors/template_InfobloxCloudDataConnectorAMA.json) | -The Infoblox Cloud Data Connector allows you to easily connect your Infoblox data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. +The Infoblox Cloud Data Connector allows you to easily connect your Infoblox BloxOne data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. + +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**IMPORTANT:** This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC**](https://aka.ms/sentinel-InfobloxCloudDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +>**IMPORTANT:** This Microsoft Sentinel data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements. +**1. Follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note: CEF logs are collected only from Linux Agents_ + +1. Navigate to your **Microsoft Sentinel workspace > Data connectors** blade. + +2. Search for the **Common Event Format (CEF) via AMA** data connector and open it. + +3. Ensure there is no existing DCR configured to collect required facility of logs as it may cause log duplication. Create a new **DCR (Data Collection Rule)**. + + _Note: It is recommended to install the AMA agent v1.27 at minimum. [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication._ + +4. Run the command provided in the **CEF via AMA data connector** page to configure the CEF collector on the machine. + + **Step B. 
Configure Infoblox BloxOne to send Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent** + + Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent. +1. Navigate to **Manage > Data Connector**. +2. Click the **Destination Configuration** tab at the top. +3. Click **Create > Syslog**. + - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**. + - **Description**: Optionally give it a meaningful **description**. + - **State**: Set the state to **Enabled**. + - **Format**: Set the format to **CEF**. + - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed. + - **Port**: Leave the port number at **514**. + - **Protocol**: Select desired protocol and CA certificate if applicable. + - Click **Save & Close**. +4. Click the **Traffic Flow Configuration** tab at the top. +5. Click **Create**. + - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**. + - **Description**: Optionally give it a meaningful **description**. + - **State**: Set the state to **Enabled**. + - Expand the **Service Instance** section. + - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. + - Expand the **Source Configuration** section. + - **Source**: Select **BloxOne Cloud Source**. + - Select all desired **log types** you wish to collect. Currently supported log types are: + - Threat Defense Query/Response Log + - Threat Defense Threat Feeds Hits Log + - DDI Query/Response Log + - DDI DHCP Lease Log + - Expand the **Destination Configuration** section. + - Select the **Destination** you just created. + - Click **Save & Close**. +6. Allow the configuration some time to activate. + + **Step C. 
Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/infobloxdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/infobloxdataconnector.md index 55cd18dfffb..d2dc54fd777 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/infobloxdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/infobloxdataconnector.md @@ -10,4 +10,96 @@ The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Infoblox API Key** is required. See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. 
Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 2 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
+ +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID** + + Follow the steps in this section to assign the role: + 1. In the Azure portal, Go to **Resource Group** and select your resource group. + 2. Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. Select **Contributor** as role and click on next. + 5. In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 4 - Steps to generate the Infoblox API Credentials** + + Follow these instructions to generate Infoblox API Key. + In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F). 
+
+**STEP 5 - Steps to deploy the connector and the associated Azure Function**
+
+>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials.
+- **Workspace ID**: `WorkspaceId`
+ > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+- **Primary Key**: `PrimaryKey`
+ > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+
+**STEP 6 - Azure Resource Manager (ARM) Template**
+
+Use this method for automated deployment of the Infoblox Data connector.
+
+1. Click the **Deploy to Azure** button below.
+
+ [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)
+2. Select the preferred **Subscription**, **Resource Group** and **Location**.
+3. Enter the following information:
+ Azure Tenant Id
+ Azure Client Id
+ Azure Client Secret
+ Infoblox API Token
+ Infoblox Base URL
+ Workspace ID
+ Workspace Key
+ Log Level (Default: INFO)
+ Confidence
+ Threat Level
+ App Insights Workspace Resource ID
+4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.
+5. Click **Purchase** to deploy.
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/infobloxnios.md b/Tools/Solutions Analyzer/connector-docs/connectors/infobloxnios.md index 13fb833a97b..d9d6b380c1a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/infobloxnios.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/infobloxnios.md @@ -10,4 +10,51 @@ The [Infoblox Network Identity Operating System (NIOS)](https://www.infoblox.com/glossary/network-identity-operating-system-nios/) connector allows you to easily connect your Infoblox NIOS logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **Infoblox NIOS**: must be configured to export logs via Syslog + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Infoblox and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20NIOS/Parser/Infoblox.yaml), on the second line of the query, enter any unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. 
Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the Infoblox NIOS** + +[Follow these instructions](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-slog-and-snmp-configuration-for-nios.pdf) to enable syslog forwarding of Infoblox NIOS Logs. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + +**4. Configure the Sentinel parser** + +Update the watchlist 'Sources_by_Source' with the hostname(s) of your Infoblox device(s). Set SourceType to 'InfobloxNIOS' and Source to the value of 'Computer' seen in the logs seen in Syslog table. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/infobloxsocinsightsdataconnector-ama.md b/Tools/Solutions Analyzer/connector-docs/connectors/infobloxsocinsightsdataconnector-ama.md index dd689d7e551..46f3fb39a92 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/infobloxsocinsightsdataconnector-ama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/infobloxsocinsightsdataconnector-ama.md @@ -14,4 +14,102 @@ The Infoblox SOC Insight Data Connector allows you to easily connect your Infobl This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the new Azure Monitor Agent. Learn more about ingesting using the new Azure Monitor Agent [here](https://learn.microsoft.com/azure/sentinel/connect-cef-ama). **Microsoft recommends using this Data Connector.** +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Workspace Keys** + +In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Workspace Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. Parsers** + +>This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution. + +**3. SOC Insights** + +>This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights). + +**4. Infoblox Cloud Data Connector** + +>This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements. +**Follow the steps below to configure this data connector** + +**A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note: CEF logs are collected only from Linux Agents_ + +1. Navigate to your **Microsoft Sentinel workspace > Data connectors** blade. + +2. Search for the **Common Event Format (CEF) via AMA** data connector and open it. + +3. Ensure there is no existing DCR configured to collect required facility of logs as it may cause log duplication. 
Create a new **DCR (Data Collection Rule)**. + + _Note: It is recommended to install the AMA agent v1.27 at minimum. [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication._ + +4. Run the command provided in the **Common Event Format (CEF) via AMA** data connector page to configure the CEF collector on the machine. + + **B. Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent** + + Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent. +1. Navigate to **Manage > Data Connector**. +2. Click the **Destination Configuration** tab at the top. +3. Click **Create > Syslog**. + - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**. + - **Description**: Optionally give it a meaningful **description**. + - **State**: Set the state to **Enabled**. + - **Format**: Set the format to **CEF**. + - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed. + - **Port**: Leave the port number at **514**. + - **Protocol**: Select desired protocol and CA certificate if applicable. + - Click **Save & Close**. +4. Click the **Traffic Flow Configuration** tab at the top. +5. Click **Create**. + - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**. + - **Description**: Optionally give it a meaningful **description**. + - **State**: Set the state to **Enabled**. + - Expand the **Service Instance** section. + - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. + - Expand the **Source Configuration** section. + - **Source**: Select **BloxOne Cloud Source**. 
+ - Select the **Internal Notifications** Log Type. + - Expand the **Destination Configuration** section. + - Select the **Destination** you just created. + - Click **Save & Close**. +6. Allow the configuration some time to activate. + + **C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/infobloxsocinsightsdataconnector-api.md b/Tools/Solutions Analyzer/connector-docs/connectors/infobloxsocinsightsdataconnector-api.md index 4f60d522fb7..ed50e749831 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/infobloxsocinsightsdataconnector-api.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/infobloxsocinsightsdataconnector-api.md @@ -10,4 +10,39 @@ The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Workspace Keys** + +In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Workspace Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. Parsers** + +>This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxInsight**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxInsight.yaml) which is deployed with the Microsoft Sentinel Solution. + +**3. SOC Insights** + +>This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights). + +**4. Follow the steps below to configure this data connector** +**1. Generate an Infoblox API Key and copy it somewhere safe** + + In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F). + + **2. Configure the Infoblox-SOC-Get-Open-Insights-API playbook** + + Create and configure the **Infoblox-SOC-Get-Open-Insights-API** playbook which is deployed with this solution. Enter your Infoblox API key in the appropriate parameter when prompted. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/infobloxsocinsightsdataconnector-legacy.md b/Tools/Solutions Analyzer/connector-docs/connectors/infobloxsocinsightsdataconnector-legacy.md index 94f25e2d829..e0da42e9f52 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/infobloxsocinsightsdataconnector-legacy.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/infobloxsocinsightsdataconnector-legacy.md @@ -22,4 +22,102 @@ This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytic Using MMA and AMA on the same machine can cause log duplication and extra ingestion cost. [More details](https://learn.microsoft.com/en-us/azure/sentinel/ama-migrate). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Workspace Keys** + +In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Workspace Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. Parsers** + +>This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution. + +**3. SOC Insights** + +>This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights). + +**4. Infoblox Cloud Data Connector** + +>This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. 
+ +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent** + +Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent. +1. Navigate to **Manage > Data Connector**. +2. Click the **Destination Configuration** tab at the top. +3. Click **Create > Syslog**. + - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**. + - **Description**: Optionally give it a meaningful **description**. + - **State**: Set the state to **Enabled**. + - **Format**: Set the format to **CEF**. + - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed. + - **Port**: Leave the port number at **514**. + - **Protocol**: Select desired protocol and CA certificate if applicable. + - Click **Save & Close**. +4. 
Click the **Traffic Flow Configuration** tab at the top. +5. Click **Create**. + - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**. + - **Description**: Optionally give it a meaningful **description**. + - **State**: Set the state to **Enabled**. + - Expand the **Service Instance** section. + - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. + - Expand the **Source Configuration** section. + - **Source**: Select **BloxOne Cloud Source**. + - Select the **Internal Notifications** Log Type. + - Expand the **Destination Configuration** section. + - Select the **Destination** you just created. + - Click **Save & Close**. +6. Allow the configuration some time to activate. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/infosecdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/infosecdataconnector.md index bcafd29b185..759ed9fb8fc 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/infosecdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/infosecdataconnector.md @@ -10,4 +10,24 @@ Use this data connector to integrate with InfoSec Crypto Analytics and get data sent directly to Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. InfoSecGlobal Crypto Analytics Data Connector** + +1. Data is sent to Microsoft Sentinel through Logstash + 2. Required Logstash configuration is included with Crypto Analytics installation + 3. 
Documentation provided with the Crypto Analytics installation explains how to enable sending data to Microsoft Sentinel +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/insightvmcloudapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/insightvmcloudapi.md index 3bddcea14e1..504caa21546 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/insightvmcloudapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/insightvmcloudapi.md @@ -10,4 +10,72 @@ The [Rapid7 Insight VM](https://www.rapid7.com/products/insightvm/) Report data connector provides the capability to ingest Scan reports and vulnerability data into Microsoft Sentinel through the REST API from the Rapid7 Insight platform (Managed in the cloud). Refer to [API documentation](https://docs.rapid7.com/insight/api-overview/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials**: **InsightVMAPIKey** is required for REST API. [See the documentation to learn more about API](https://docs.rapid7.com/insight/api-overview/). Check all [requirements and follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) for obtaining credentials + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Insight VM API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parsers based on a Kusto Function to work as expected [**InsightVMAssets**](https://aka.ms/sentinel-InsightVMAssets-parser) and [**InsightVMVulnerabilities**](https://aka.ms/sentinel-InsightVMVulnerabilities-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuration steps for the Insight VM Cloud** + + [Follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) to obtain the credentials. 
+ +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Rapid7 Insight Vulnerability Management Report data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-InsightVMCloudAPI-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **InsightVMAPIKey**, choose **InsightVMCloudRegion** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Rapid7 Insight Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the [Azure Function App](https://github.com/averbn/azure_sentinel_data_connectors/raw/main/insight-vm-cloud-azure-sentinel-data-connector/InsightVMCloudAPISentinelConn.zip) file. Extract archive to your local development computer. +2.
Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + InsightVMAPIKey + InsightVMCloudRegion + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoabusedataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoabusedataconnector.md index b0050edf29d..d2342f3e003 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoabusedataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoabusedataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download standard_abuse datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required.
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Assign the AAD application you just created to the Contributor (Privileged administrator roles) and Monitoring Metrics Publisher (Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link. + +**4. Get Workspace Resource ID** + +Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' + +**5. Deploy the Azure Function** + +Use this for automated deployment of the IPinfo data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below.
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Abuse-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Abuse-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoasndataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoasndataconnector.md index 83db73cbc13..ff07c5ed092 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoasndataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoasndataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download standard_ASN datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. 
Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link. + +**4. Get Workspace Resource ID** + +Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' + +**5. Deploy the Azure Function** + +Use this for automated deployment of the IPinfo data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-ASN-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-ASN-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. 
In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfocarrierdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfocarrierdataconnector.md index 5b2a2588649..209eac63738 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfocarrierdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfocarrierdataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download standard_carrier datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link. + +**4. Get Workspace Resource ID** + +Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' + +**5. Deploy the Azure Function** + +Use this for automated deployment of the IPinfo data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Carrier-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the Azure Function App file. 
Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Carrier-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfocompanydataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfocompanydataconnector.md index cfcd91f4e31..9f08421ab28 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfocompanydataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfocompanydataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download standard_company datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link. + +**4. Get Workspace Resource ID** + +Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' + +**5. Deploy the Azure Function** + +Use this for automated deployment of the IPinfo data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. 
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Company-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Company-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfocountrydataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfocountrydataconnector.md index 891bf2cd0ee..bfd4bcc417f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfocountrydataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfocountrydataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download country_asn datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. 
Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link. + +**4. Get Workspace Resource ID** + +Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' + +**5. Deploy the Azure Function** + +Use this for automated deployment of the IPinfo data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Country-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Country-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. 
In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfodomaindataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfodomaindataconnector.md index aa90e1e24e3..3f166f1745c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfodomaindataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfodomaindataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download standard_domain datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link. + +**4. Get Workspace Resource ID** + +Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' + +**5. Deploy the Azure Function** + +Use this for automated deployment of the IPinfo data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Domain-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the Azure Function App file. 
Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Domain-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoiplocationdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoiplocationdataconnector.md index f46bdd6cd5c..3d0b52cf9d3 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoiplocationdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoiplocationdataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download standard_location datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).
+
+**Custom Permissions:**
+- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).
+- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/).
+
+## Setup Instructions
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+**1. Retrieve API Token**
+
+Retrieve your IPinfo API Token [here](https://ipinfo.io/).
+
+**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application**
+
+In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.
+
+**3. Assign the AAD application the Microsoft Sentinel Contributor Role.**
+
+Assign the AAD application you just created to the Contributor (Privileged administrator roles) and Monitoring Metrics Publisher (Job function roles) in the same “Resource Group” you use for “Log Analytics Workspace” on which “Microsoft Sentinel” is added: Use this Link.
+
+**4. Get Workspace Resource ID**
+
+Use the Log Analytics Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'
+
+**5. Deploy the Azure Function**
+
+Use this for automated deployment of the IPinfo data connector using an ARM Template.
+
+1. Click the **Deploy to Azure** button below. 
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Iplocation-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Iplocation-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoiplocationextendeddataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoiplocationextendeddataconnector.md index a6face8979e..1c9fb71f1dc 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoiplocationextendeddataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoiplocationextendeddataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download standard_location_extended datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. 
Assign the AAD application the Microsoft Sentinel Contributor Role.**
+
+Assign the AAD application you just created to the Contributor (Privileged administrator roles) and Monitoring Metrics Publisher (Job function roles) in the same “Resource Group” you use for “Log Analytics Workspace” on which “Microsoft Sentinel” is added: Use this Link.
+
+**4. Get Workspace Resource ID**
+
+Use the Log Analytics Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'
+
+**5. Deploy the Azure Function**
+
+Use this for automated deployment of the IPinfo data connector using an ARM Template.
+
+1. Click the **Deploy to Azure** button below.
+
+ [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Iplocation-Extended-azuredeploy)
+2. Select the preferred **Subscription**, **Resource Group** and **Location**.
+3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.
+
+**1. Manual Deployment of Azure Functions**
+
+Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code).
+**Step 1 - Deploy a Function App**
+
+ 1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Iplocation-Extended-functionapp).
+2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode.
+3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode.
+4. After successful deployment of the function app, follow the next steps for configuring it.
+
+ **Step 2 - Configure the Function App**
+
+ 1. Go to Azure Portal for the Function App configuration.
+2. 
In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoprivacydataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoprivacydataconnector.md index a2bc61822f2..600798fc4ec 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoprivacydataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoprivacydataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download standard_privacy datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link. + +**4. Get Workspace Resource ID** + +Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' + +**5. Deploy the Azure Function** + +Use this for automated deployment of the IPinfo data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Privacy-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the Azure Function App file. 
Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Privacy-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoprivacyextendeddataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoprivacyextendeddataconnector.md index cad27518f77..0f4ef2d2528 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoprivacyextendeddataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfoprivacyextendeddataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download standard_privacy datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link. + +**4. Get Workspace Resource ID** + +Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' + +**5. Deploy the Azure Function** + +Use this for automated deployment of the IPinfo data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. 
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Privacy-Extended-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Privacy-Extended-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinforirwhoisdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinforirwhoisdataconnector.md index bc053ae947c..89d518b8353 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinforirwhoisdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinforirwhoisdataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download RIRWHOIS datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. 
Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link. + +**4. Get Workspace Resource ID** + +Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' + +**5. Deploy the Azure Function** + +Use this for automated deployment of the IPinfo data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-RIRWHOIS-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-RIRWHOIS-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. 
In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinforwhoisdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinforwhoisdataconnector.md index b900a0d678f..7a0970c10ff 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinforwhoisdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinforwhoisdataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download RWHOIS datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link. + +**4. Get Workspace Resource ID** + +Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' + +**5. Deploy the Azure Function** + +Use this for automated deployment of the IPinfo data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-RWHOIS-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the Azure Function App file. 
Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-RWHOIS-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoisasndataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoisasndataconnector.md index ce6a637d0f4..c4c2567389b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoisasndataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoisasndataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download WHOIS_ASN datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link. + +**4. Get Workspace Resource ID** + +Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' + +**5. Deploy the Azure Function** + +Use this for automated deployment of the IPinfo data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. 
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-ASN-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-ASN-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoismntdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoismntdataconnector.md index 8f93e3fc29d..0a127478346 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoismntdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoismntdataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download WHOIS_MNT datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. 
Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link. + +**4. Get Workspace Resource ID** + +Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' + +**5. Deploy the Azure Function** + +Use this for automated deployment of the IPinfo data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-MNT-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-MNT-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. 
In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoisnetdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoisnetdataconnector.md index 2b9ecb555e4..d1f4401113e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoisnetdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoisnetdataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download WHOIS_NET datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link. + +**4. Get Workspace Resource ID** + +Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' + +**5. Deploy the Azure Function** + +Use this for automated deployment of the IPinfo data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-NET-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. 
Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-NET-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoisorgdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoisorgdataconnector.md index 6ff545711a7..68003f09347 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoisorgdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoisorgdataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download WHOIS_ORG datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link. + +**4. Get Workspace Resource ID** + +Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' + +**5. Deploy the Azure Function** + +Use this for automated deployment of the IPinfo data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. 
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-ORG-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-ORG-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoispocdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoispocdataconnector.md index caec3d12bc3..962ae052d1c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoispocdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ipinfowhoispocdataconnector.md @@ -10,4 +10,71 @@ This IPinfo data connector installs an Azure Function app to download WHOIS_POC datasets and insert it into custom log table in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Retrieve API Token** + +Retrieve your IPinfo API Token [here](https://ipinfo.io/). + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application** + +In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link. + +**3. 
Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link. + +**4. Get Workspace Resource ID** + +Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' + +**5. Deploy the Azure Function** + +Use this for automated deployment of the IPinfo data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-POC-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**. + +**1. Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-POC-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. 
In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/iscbind.md b/Tools/Solutions Analyzer/connector-docs/connectors/iscbind.md index 9653792cba7..07ae8e737ed 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/iscbind.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/iscbind.md @@ -10,4 +10,49 @@ The [ISC Bind](https://www.isc.org/bind/) connector allows you to easily connect your ISC Bind logs with Microsoft Sentinel. This gives you more insight into your organization's network traffic data, DNS query data, traffic statistics and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **ISC Bind**: must be configured to export logs via Syslog + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ISCBind and load the function code or click [here](https://aka.ms/sentinel-iscbind-parser).The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the ISC Bind** + +1. Follow these instructions to configure the ISC Bind to forward syslog: + - [DNS Logs](https://kb.isc.org/docs/aa-01526) +2. Configure Syslog to send the Syslog traffic to Agent. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/island-admin-polling.md b/Tools/Solutions Analyzer/connector-docs/connectors/island-admin-polling.md index dd42d5a3f98..3a09af2af2f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/island-admin-polling.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/island-admin-polling.md @@ -10,4 +10,22 @@ The [Island](https://www.island.io) Admin connector provides the capability to ingest Island Admin Audit logs into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Island API Key**: An Island API key is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Island to Microsoft Sentinel** + +Provide the Island API URL and Key. API URL is https://management.island.io/api/external/v1/adminActions for US or https://eu.management.island.io/api/external/v1/adminActions for EU. + Generate the API Key in the Management Console under Settings > API. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/island-user-polling.md b/Tools/Solutions Analyzer/connector-docs/connectors/island-user-polling.md index 464a7d8b3c0..92cbeda4c0b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/island-user-polling.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/island-user-polling.md @@ -10,4 +10,22 @@ The [Island](https://www.island.io) connector provides the capability to ingest Island User Activity logs into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Island API Key**: An Island API key is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Island to Microsoft Sentinel** + +Provide the Island API URL and Key. API URL is https://management.island.io/api/external/v1/timeline for US or https://eu.management.island.io/api/external/v1/timeline for EU. + Generate the API Key in the Management Console under Settings > API. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ivantiuem.md b/Tools/Solutions Analyzer/connector-docs/connectors/ivantiuem.md index 8862083acd7..eb59d858d9e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ivantiuem.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ivantiuem.md @@ -10,4 +10,50 @@ The [Ivanti Unified Endpoint Management](https://www.ivanti.com/products/unified-endpoint-manager) data connector provides the capability to ingest [Ivanti UEM Alerts](https://help.ivanti.com/ld/help/en_US/LDMS/11.0/Windows/alert-c-monitoring-overview.htm) into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**IvantiUEMEvent**](https://aka.ms/sentinel-ivantiuem-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using Ivanti Unified Endpoint Management Release 2021.1 Version 11.0.3.374 + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server where the Ivanti Unified Endpoint Management Alerts are forwarded. + +> Logs from Ivanti Unified Endpoint Management Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. 
+ - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure Ivanti Unified Endpoint Management alert forwarding.** + +[Follow the instructions](https://help.ivanti.com/ld/help/en_US/LDMS/11.0/Windows/alert-t-define-action.htm) to set up Alert Actions to send logs to syslog server. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/jamfprotectpush.md b/Tools/Solutions Analyzer/connector-docs/connectors/jamfprotectpush.md index 298854d8cba..9e2a80e6f77 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/jamfprotectpush.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/jamfprotectpush.md @@ -10,4 +10,42 @@ The [Jamf Protect](https://www.jamf.com/products/jamf-protect/) connector provides the capability to read raw event data from Jamf Protect in Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher. +- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). 
Typically requires Azure RBAC Owner or User Access Administrator role + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Create ARM Resources and Provide the Required Permissions** + +This connector reads data from the tables that Jamf Protect uses in a Microsoft Analytics Workspace, if the [data forwarding](https://docs.jamf.com/jamf-protect/documentation/Data_Forwarding_to_a_Third_Party_Storage_Solution.html?hl=sentinel#task-4227) option is enabled in Jamf Protect then raw event data is sent to the Microsoft Sentinel Ingestion API. +#### Automated Configuration and Secure Data Ingestion with Entra Application +Clicking on "Deploy" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). +It will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token. +Deploy Jamf Protect connector resources + +**2. Push your logs into the workspace** + +Use the following parameters to configure the your machine to send the logs to the workspace. 
+- **Tenant ID (Directory ID)**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Endpoint Uri**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Rule Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Unified Logs Stream Name**: `Custom-jamfprotectunifiedlogs` +- **Telemetry Stream Name**: `Custom-jamfprotecttelemetryv2` +- **Alerts Stream Name**: `Custom-jamfprotectalerts` + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/jbosseap.md b/Tools/Solutions Analyzer/connector-docs/connectors/jbosseap.md index cd37c51b846..823e9787ae5 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/jbosseap.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/jbosseap.md @@ -10,4 +10,72 @@ The JBoss Enterprise Application Platform data connector provides the capability to ingest [JBoss](https://www.redhat.com/en/technologies/jboss-middleware/application-platform) events into Microsoft Sentinel. Refer to [Red Hat documentation](https://access.redhat.com/documentation/en-us/red_hat_jboss_enterprise_application_platform/7.0/html/configuration_guide/logging_with_jboss_eap) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**JBossEvent**](https://aka.ms/sentinel-jbosseap-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using JBoss Enterprise Application Platform 7.4.0. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the JBoss server where the logs are generated. + +> Logs from JBoss Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. 
Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. Click **+Add custom** +3. Click **Browse** to upload a sample of a JBoss log file (e.g. server.log). Then, click **Next >** +4. Select **Timestamp** as the record delimiter and select Timestamp format **YYYY-MM-DD HH:MM:SS** from the dropdown list then click **Next >** +5. Select **Windows** or **Linux** and enter the path to JBoss logs based on your configuration. Example: + - **Linux** Directory: + +>Standalone server: EAP_HOME/standalone/log/server.log + +>Managed domain: EAP_HOME/domain/servers/SERVER_NAME/log/server.log + +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **JBossLogs** as the custom log Name and click **Done** + +**3. Check logs in Microsoft Sentinel** + +Open Log Analytics to check if the logs are received using the JBossLogs_CL Custom log table. + +>**NOTE:** It may take up to 30 minutes before new logs will appear in JBossLogs_CL table. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/jiraauditapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/jiraauditapi.md index f65276203d3..409fdfa3ac3 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/jiraauditapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/jiraauditapi.md @@ -10,4 +10,94 @@ The [Atlassian Jira](https://www.atlassian.com/software/jira) Audit data connector provides the capability to ingest [Jira Audit Records](https://support.atlassian.com/jira-cloud-administration/docs/audit-activities-in-jira-applications/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/) for more information. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **JiraAccessToken**, **JiraUsername** is required for REST API. [See the documentation to learn more about API](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/). Check all [requirements and follow the instructions](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) for obtaining credentials. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Jira REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-jiraauditapi-parser) to create the Kusto functions alias, **JiraAudit** + +**STEP 1 - Configuration steps for the Jira API** + + [Follow the instructions](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) to obtain the credentials. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Jira Audit data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentineljiraauditazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentineljiraauditazuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. 
Enter the **JiraAccessToken**, **JiraUsername**, **JiraHomeSiteName** (short site name part, as example HOMESITENAME from https://community.atlassian.com) and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Jira Audit data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-jiraauditapi-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. JiraAuditXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. 
For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + JiraUsername + JiraAccessToken + JiraHomeSiteName + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/jiraauditccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/jiraauditccpdefinition.md index 1f110c2f90e..21137b4f019 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/jiraauditccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/jiraauditccpdefinition.md @@ -10,4 +10,43 @@ The [Atlassian Jira](https://www.atlassian.com/software/jira) Audit data connector provides the capability to ingest [Jira Audit Records](https://support.atlassian.com/jira-cloud-administration/docs/audit-activities-in-jira-applications/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/) for more information.
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Atlassian Jira API access**: Permission of [Administer Jira](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) is required to get access to the Jira Audit logs API. See [Jira API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/#api-group-audit-records) to learn more about the audit API. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +To enable the Atlassian Jira connector for Microsoft Sentinel, click to add an organization, fill the form with the Jira environment credentials and click to Connect. + Follow [these steps](https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/) to create an API token. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Atlassian Jira organization URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. 
You cannot configure data collectors through this static documentation. + +**Add organization** + +*Add Atlassian Jira organization* + +When you click the "Add organization" button in the portal, a configuration form will open. You'll need to provide: + +- **Atlassian Jira organization URL** (optional): Atlassian Jira organization URL +- **User Name** (optional): User Name (e.g., user@example.com) +- **API Key** (optional): API Key + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/juniperidp.md b/Tools/Solutions Analyzer/connector-docs/connectors/juniperidp.md index cdea4ea6078..a50a1be4bc4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/juniperidp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/juniperidp.md @@ -10,4 +10,62 @@ The [Juniper](https://www.juniper.net/) IDP data connector provides the capability to ingest [Juniper IDP](https://www.juniper.net/documentation/us/en/software/junos/idp-policy/topics/topic-map/security-idp-overview.html) events into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This data connector depends on a parser based on Kusto Function to work as expected [**JuniperIDP**](https://aka.ms/sentinel-JuniperIDP-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** IDP OS 5.1 and above is supported by this data connector. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Follow the configuration steps below to get Juniper IDP logs into Microsoft Sentinel. This configuration enriches events generated by Juniper IDP module to provide visibility on log source information for Juniper IDP logs. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps. +1. Download config file [juniper_idp.conf](https://aka.ms/sentinel-JuniperIDP-conf). +2. Login to the server where you have installed Azure Log Analytics agent. +3. Copy juniper_idp.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. +4. Edit juniper_idp.conf as follows: + + i. 
change the listen port for receiving logs based on your configuration (line 3) + + ii. replace **workspace_id** with real value of your Workspace ID (lines 58,59,60,63) +5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command: + sudo /opt/microsoft/omsagent/bin/service_control restart +6. To configure a remote syslog destination, please reference the [SRX Getting Started - Configure System Logging](https://kb.juniper.net/InfoCenter/index?page=content&id=kb16502). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/junipersrx.md b/Tools/Solutions Analyzer/connector-docs/connectors/junipersrx.md index 20f478a3d9b..2c6c84a7cf5 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/junipersrx.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/junipersrx.md @@ -10,4 +10,50 @@ The [Juniper SRX](https://www.juniper.net/us/en/products-services/security/srx-series/) connector allows you to easily connect your Juniper SRX logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **Juniper SRX**: must be configured to export logs via Syslog + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias JuniperSRX and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Juniper%20SRX/Parsers/JuniperSRX.txt), on the second line of the query, enter the hostname(s) of your JuniperSRX device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the Juniper SRX** + +1. Follow these instructions to configure the Juniper SRX to forward syslog: + - [Traffic Logs (Security Policy Logs)](https://kb.juniper.net/InfoCenter/index?page=content&id=KB16509&actp=METADATA) + - [System Logs](https://kb.juniper.net/InfoCenter/index?page=content&id=kb16502) +2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/keepersecuritypush2.md b/Tools/Solutions Analyzer/connector-docs/connectors/keepersecuritypush2.md index 538a641662d..113053a15f8 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/keepersecuritypush2.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/keepersecuritypush2.md @@ -10,4 +10,63 @@ The [Keeper Security](https://keepersecurity.com) connector provides the capability to read raw event data from Keeper Security in Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher. +- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Create ARM Resources and Provide the Required Permissions** + +This connector reads data from the tables that Keeper Security uses in a Microsoft Analytics Workspace, if the [data forwarding](https://docs.keepersecurity.com/docs/data-forwarding) option is enabled in Keeper Security then raw event data is sent to the Microsoft Sentinel Ingestion API. +#### Automated Configuration and Secure Data Ingestion with Entra Application +Clicking on "Deploy" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). +It will then create an Entra application, link the DCR to it, and set the entered secret in the application. 
This setup enables data to be sent securely to the DCR using an Entra token. +Keeper Security connector resources + +**2. Push your logs into the workspace** + +Use the following parameters to configure your machine to send the logs to the workspace. +- **Tenant ID (Directory ID)**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Endpoint Uri**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Rule Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Events Logs Stream Name**: `Custom-KeeperSecurityEventNewLogs` + +**3. Update Keeper Admin Console** + +Configure the Keeper Admin Console with the Azure connection details to enable data forwarding to Microsoft Sentinel. +#### Configure Azure Monitor Logs in Keeper Admin Console + +In the [Keeper Admin Console](https://keepersecurity.com/console/), login as the Keeper Administrator. Then go to **Reporting & Alerts** and select **Azure Monitor Logs**. + +Provide the following information from Step 2 above into the Admin Console: + +- **Azure Tenant ID**: You can find this from Azure's "Subscriptions" area. +- **Application (client) ID**: This is located in the App registration (KeeperLogging) overview screen +- **Client Secret Value**: This is the Client Secret Value from the app registration secrets.
+- **Endpoint URL**: This is a URL that is created in the following specific format: + `https://<Data Collection Endpoint URI>/dataCollectionRules/<DCR Immutable ID>/streams/<Stream Name>?api-version=2023-01-01` + +To assemble the Endpoint URL: + +- **`<Data Collection Endpoint URI>`** This comes from Step 2 above +- **`<DCR Immutable ID>`** From the Data Collector Rule, copy the "Immutable Id" value, e.g. `dcr-xxxxxxx` +- **`<Stream Name>`** This is the table name created by Azure, e.g. `Custom-KeeperSecurityEventNewLogs` + +Example: `https://<Data Collection Endpoint URI>/dataCollectionRules/<DCR Immutable ID>/streams/Custom-KeeperSecurityEventNewLogs?api-version=2023-01-01` + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/lastpass-polling.md b/Tools/Solutions Analyzer/connector-docs/connectors/lastpass-polling.md index 4ad1f994521..39e909aa276 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/lastpass-polling.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/lastpass-polling.md @@ -10,4 +10,21 @@ The [LastPass Enterprise](https://www.lastpass.com/products/enterprise-password-management-and-sso) connector provides the capability to LastPass reporting (audit) logs into Microsoft Sentinel. The connector provides visibility into logins and activity within LastPass (such as reading and removing passwords). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **LastPass API Key and CID**: A LastPass API key and CID are required. [See the documentation to learn more about LastPass API](https://support.logmeininc.com/lastpass/help/use-the-lastpass-provisioning-api-lp010068). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect LastPass Enterprise to Microsoft Sentinel** + +Provide the LastPass Provisioning API Key. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step.
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/lookoutapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/lookoutapi.md index 59ee4488a5b..2f5c3c3cc73 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/lookoutapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/lookoutapi.md @@ -14,4 +14,46 @@ The [Lookout](https://lookout.com) data connector provides the capability to ing

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Mobile Risk API Credentials/permissions**: **EnterpriseName** & **ApiKey** are required for Mobile Risk API. [See the documentation to learn more about API](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide). Check all [requirements and follow the instructions](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#authenticatingwiththemobileriskapi) for obtaining credentials. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This [Lookout](https://lookout.com) data connector uses Azure Functions to connect to the Mobile Risk API to pull its events into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**LookoutEvents**](https://aka.ms/sentinel-lookoutapi-parser) which is deployed with the Microsoft Sentinel Solution. 
+ +**STEP 1 - Configuration steps for the Mobile Risk API** + + [Follow the instructions](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#authenticatingwiththemobileriskapi) to obtain the credentials. + +**STEP 2 - Follow below mentioned instructions to deploy the [Lookout](https://lookout.com) data connector and the associated Azure Function** + +>**IMPORTANT:** Before starting the deployment of the [Lookout](https://lookout.com) data connector, make sure to have the Workspace ID and Workspace Key ready (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Workspace Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Azure Resource Manager (ARM) Template** + +Follow below steps for automated deployment of the [Lookout](https://lookout.com) data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-lookoutapi-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Region**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **Function Name**, **Workspace ID**,**Workspace Key**,**Enterprise Name** & **Api Key** and deploy. +4. Click **Create** to deploy. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/lookoutcloudsecuritydataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/lookoutcloudsecuritydataconnector.md index 30c63d2316e..636e664a40c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/lookoutcloudsecuritydataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/lookoutcloudsecuritydataconnector.md @@ -10,4 +10,118 @@ This connector uses a Agari REST API connection to push data into Microsoft Sentinel Log Analytics. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Agari REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**Step-by-Step Instructions** + + As a prerequisite to this integration, first, you need to configure an API client on Lookout's Management Console. From the Management Console, you can add one or more clients and configure the appropriate permissions and actions for each. + + 1. Name - The name given to this client. + + 2. Client ID - the unique ID that was provided for this client. + + 3. Permissions - The permissions enabled for this client. The permissions you check are those that the client will be allowed to access. The listed options are Activity, Violation, Anomaly, Insights, and Profile + + 4. Service URL - The URL used to access this client.It must start with https:// + + 5. Authorized IPs - The valid IP address or addresses that apply to this client. + + 6. Actions - The actions you can take for this client. Click the icon for the action you want to perform. Editing client information, displaying the client secret, or deleting the client. + + **To add a new API client:** + + 1. Go to Administration > Enterprise Integration > API Clients and click New. + + 2. Enter a Name (required) and a Description (optional). + + 3. Enter the Client ID that was provided to you. + + 4. Select one or more Permissions from the dropdown list. + + 5. Enter one or more Authorized IP addresses for this client. Separate each address with a comma. + + 6. Click Save. + + When prompted, copy the string for the client's secret. You will need this information (along with the client ID) to authenticate to the API gateway. 
+ +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-LookoutCS-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Lookout Client ID**, **Lookout Client Secret**, **Lookout Base url**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-Lookout-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + LookoutClientId + LookoutApiSecret + Baseurl + WorkspaceID + PrimaryKey + logAnalyticsUri (Optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +4. 
Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/lookoutstreaming-definition.md b/Tools/Solutions Analyzer/connector-docs/connectors/lookoutstreaming-definition.md index f98849220aa..5dd4b4ba6be 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/lookoutstreaming-definition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/lookoutstreaming-definition.md @@ -10,4 +10,19 @@ The [Lookout Mobile Threat Detection](https://lookout.com) data connector provides the capability to ingest events related to mobile security risks into Microsoft Sentinel through the Mobile Risk API. Refer to [API documentation](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide) for more information. This connector helps you examine potential security risks detected in mobile devices. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions on the workspace are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Lookout Mobile Threat Defence connector to Microsoft Sentinel** +Before connecting to Lookout, ensure the following prerequisites are completed. +#### 1. **ApiKey** is required for Mobile Threat Detection API. See the [documentation](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide) to learn more about API. Check all requirements and follow the [instructions](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#authenticatingwiththemobileriskapi) for obtaining credentials. 
+- **API key**: (password field) +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/lumenthreatfeedconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/lumenthreatfeedconnector.md index 951fa8adeaa..5e20d529ada 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/lumenthreatfeedconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/lumenthreatfeedconnector.md @@ -10,4 +10,73 @@ The [Lumen Defender Threat Feed](https://bll-analytics.mss.lumen.com/analytics) connector provides the capability to ingest STIX-formatted threat intelligence indicators from Lumen's Black Lotus Labs research team into Microsoft Sentinel. The connector automatically downloads and uploads daily threat intelligence indicators including IPv4 addresses and domains to the ThreatIntelIndicators table via the STIX Objects Upload API. +## Permissions + +**Resource Provider Permissions:** +- **Log Analytics Workspace** (Workspace): Read and write permissions on the Log Analytics workspace are required. + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Azure Entra App Registration**: An Azure Entra application registration with the Microsoft Sentinel Contributor role assigned is required for STIX Objects API access. [See the documentation to learn more about Azure Entra applications](https://docs.microsoft.com/azure/active-directory/develop/quickstart-register-app). +- **Microsoft Sentinel Contributor Role**: Microsoft Sentinel Contributor role is required for the Azure Entra application to upload threat intelligence indicators. 
+- **Lumen Defender Threat Feed API Key**: A Lumen Defender Threat Feed API Key is required for accessing threat intelligence data. [Contact Lumen for API access](mailto:DefenderThreatFeedSales@Lumen.com?subject=API%20Access%20Request). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions with Durable Functions to connect to the Lumen Defender Threat Feed API and upload threat intelligence indicators to Microsoft Sentinel via the STIX Objects API. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +**1. Configuration** + +**STEP 1 - Obtain Lumen Defender Threat Feed API Key** + +1. [Contact Lumen](mailto:DefenderThreatFeedSales@Lumen.com?subject=API%20Access%20Request) to obtain API access to our Threat Feed API service +2. Obtain your API key for authentication. + +**STEP 2 - Configure Azure Entra ID Application and gather information** + +1. Create an Entra application. [See the documentation for a guide to registering an application in Microsoft Entra ID.](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app) +2. Create a client secret and note the Application ID, Tenant ID, and Client Secret +3. Assign the **Microsoft Sentinel Contributor** role to the application on your Microsoft Sentinel Log Analytics Workspace +4. Make note of your Workspace ID, as well as the App Insights Workspace Resource ID, which can be obtained from the overview page of the Log Analytics Workspace for your Microsoft Sentinel instance. Click on the “JSON View” link in the top right and the Resource ID will be displayed at the top with a copy button. 
+- **Tenant ID**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**STEP 3 - Enable the Threat Intelligence Upload Indicators API (Preview) data connector in Microsoft Sentinel** + +1. Deploy the **Threat Intelligence (New) Solution**, which includes the **Threat Intelligence Upload Indicators API (Preview)** +2. Browse to the Content Hub, find and select the **Threat Intelligence (NEW)** solution. +3. Select the **Install/Update** button. + +**STEP 4 - Deploy the Azure Function** + +**IMPORTANT:** Before deploying the Lumen Defender Threat Feed connector, have the Tenant ID, Workspace ID, App Insights Workspace Resource ID, Azure Entra application details (Client ID, Client Secret), and Lumen API key readily available. + +1. Click the Deploy to Azure button. + +[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FLumen%20Defender%20Threat%20Feed%2FData%2520Connectors%2FLumenThreatFeed%2Fazuredeploy_Connector_LumenThreatFeed_AzureFunction.json) + +2. Fill in the appropriate values for each parameter: + +- Subscription: Confirm the correct subscription is selected or use the dropdown to change your selection +- Resource Group: Select the resource group to be used by the Function App and related resources +- Function Name: Enter a globally unique name with an 11-character limit. Adhere to your organization’s naming convention and ensure the name is globally unique since it is used (along with the uniqueString() function) to identify the ARM template being deployed. 
+- Workspace ID: Found in the "Overview" tab for the Log Analytics Workspace of the Microsoft Sentinel instance and provided for convenience on the connector information page. +- Lumen API Key: Obtain an API key through Lumen support +- Lumen Base URL: Filled in automatically and should generally not be changed. This URL contains API endpoints used by the connector +- Tenant ID: Obtained from the Entra App Registration overview page for the registered application (listed as Directory ID) and can also be obtained from the Tenant Information page in Azure +- Client ID: Obtained from the Entra App Registration overview page for the registered application (listed as Application ID) +- Client Secret: Obtained when the secret is created during the app registration process. It can only be viewed when first created and is hidden permanently afterwards. Rerun the app registration process to obtain a new Client Secret if necessary. +- App Insights Workspace Resource ID: Obtained from the overview page of the Log Analytics Workspace for your Microsoft Sentinel instance. Click on the "JSON View" link in the top right and the Resource ID will be displayed at the top with a copy button. +- Blob Container Name: Use the default name unless otherwise required. Azure Blob Storage is used for temporary storage and processing of threat indicators. + +**STEP 5 - Verify Deployment** + +1. The connector polls for indicator updates every 15 minutes. +2. Monitor the Function App logs in the Azure Portal to verify successful execution +3. After the app performs its first run, review the indicators ingested by either viewing the “Lumen Defender Threat Feed Overview” workbook or viewing the “Threat Intelligence” section in Microsoft Sentinel. In Microsoft Sentinel “Threat Intelligence”, filter for source “Lumen” to display only Lumen generated indicators. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/mailguard365.md b/Tools/Solutions Analyzer/connector-docs/connectors/mailguard365.md index 60ccbed88aa..e9e033a1755 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/mailguard365.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/mailguard365.md @@ -10,4 +10,26 @@ MailGuard 365 Enhanced Email Security for Microsoft 365. Exclusive to the Microsoft marketplace, MailGuard 365 is integrated with Microsoft 365 security (incl. Defender) for enhanced protection against advanced email threats like phishing, ransomware and sophisticated BEC attacks. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure and connect MailGuard 365** + +1. In the MailGuard 365 Console, click **Settings** on the navigation bar. +2. Click the **Integrations** tab. +3. Click the **Enable Microsoft Sentinel**. +4. Enter your workspace id and primary key from the fields below, click **Finish**. +5. For additional instructions, please contact MailGuard 365 support. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/mcafeeepo.md b/Tools/Solutions Analyzer/connector-docs/connectors/mcafeeepo.md index fc730f07406..34de0921245 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/mcafeeepo.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/mcafeeepo.md @@ -10,4 +10,45 @@ The McAfee ePolicy Orchestrator data connector provides the capability to ingest [McAfee ePO](https://www.mcafee.com/enterprise/en-us/products/epolicy-orchestrator.html) events into Microsoft Sentinel through the syslog. Refer to [documentation](https://docs.mcafee.com/bundle/epolicy-orchestrator-landing/page/GUID-0C40020F-5B7F-4549-B9CC-0E017BC8797F.html) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>This data connector depends on a parser based on a Kusto Function to work as expected [**McAfeeEPOEvent**](https://aka.ms/sentinel-McAfeeePO-parser) which is deployed with the Microsoft Sentinel Solution. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. 
+**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. +- **Open Syslog settings** + +**3. Configure McAfee ePolicy Orchestrator event forwarding to Syslog server** + +[Follow these instructions](https://kcm.trellix.com/corporate/index?page=content&id=KB87927) to register the syslog server. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/mcafeensp.md b/Tools/Solutions Analyzer/connector-docs/connectors/mcafeensp.md index 2e54fe1aaff..8f5a1588805 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/mcafeensp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/mcafeensp.md @@ -10,4 +10,56 @@ The [McAfee® Network Security Platform](https://www.mcafee.com/enterprise/en-us/products/network-security-platform.html) data connector provides the capability to ingest [McAfee® Network Security Platform events](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-integration-guide-unmanaged/page/GUID-8C706BE9-6AC9-4641-8A53-8910B51207D8.html) into Microsoft Sentinel. Refer to [McAfee® Network Security Platform](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-integration-guide-unmanaged/page/GUID-F7D281EC-1CC9-4962-A7A3-5A9D9584670E.html) for more information. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**McAfeeNSPEvent**](https://aka.ms/sentinel-mcafeensp-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using McAfee® Network Security Platform version: 10.1.x + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server where the McAfee® Network Security Platform logs are forwarded. + +> Logs from McAfee® Network Security Platform Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure McAfee® Network Security Platform event forwarding** + +Follow the configuration steps below to get McAfee® Network Security Platform logs into Microsoft Sentinel. 
+1. [Follow these instructions](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-product-guide/page/GUID-E4A687B0-FAFB-4170-AC94-1D968A10380F.html) to forward alerts from the Manager to a syslog server. +2. Add a syslog notification profile, [more details here](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-product-guide/page/GUID-5BADD5D7-21AE-4E3B-AEE2-A079F3FD6A38.html). This is mandatory. While creating profile, to make sure that events are formatted correctly, enter the following text in the Message text box: + :|SENSOR_ALERT_UUID|ALERT_TYPE|ATTACK_TIME|ATTACK_NAME|ATTACK_ID + |ATTACK_SEVERITY|ATTACK_SIGNATURE|ATTACK_CONFIDENCE|ADMIN_DOMAIN|SENSOR_NAME|INTERFACE + |SOURCE_IP|SOURCE_PORT|DESTINATION_IP|DESTINATION_PORT|CATEGORY|SUB_CATEGORY + |DIRECTION|RESULT_STATUS|DETECTION_MECHANISM|APPLICATION_PROTOCOL|NETWORK_PROTOCOL| + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftazurepurview.md b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftazurepurview.md index e3f5e5cb3bb..7083f6c4464 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftazurepurview.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftazurepurview.md @@ -10,4 +10,28 @@ Connect to Microsoft Purview to enable data sensitivity enrichment of Microsoft Sentinel. Data classification and sensitivity label logs from Microsoft Purview scans can be ingested and visualized through workbooks, analytical rules, and more. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2224125&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Microsoft Purview account Owner or Contributor role to set up Diagnostic Settings. 
Microsoft Contributor role with write permissions to enable data connector, view workbook, and create analytic rules. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Microsoft Purview to Microsoft Sentinel** + +Within the Azure Portal, navigate to your Purview resource: + 1. In the search bar, search for **Purview accounts.** + 2. Select the specific account that you would like to be set up with Sentinel. + +Inside your Microsoft Purview resource: + 3. Select **Diagnostic Settings.** + 4. Select **+ Add diagnostic setting.** + 5. In the **Diagnostic setting** blade: + - Select the Log Category as **DataSensitivityLogEvent**. + - Select **Send to Log Analytics**. + - Choose the log destination workspace. This should be the same workspace that is used by **Microsoft Sentinel.** + - Click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftcloudappsecurity.md b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftcloudappsecurity.md index f6870cc954a..e5157cd0631 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftcloudappsecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftcloudappsecurity.md @@ -26,4 +26,35 @@ By connecting with [Microsoft Defender for Cloud Apps](https://aka.ms/asi-mcas-c [Deploy now >](https://aka.ms/asi-mcas-connector-deploynow) +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. 
+ +**Licenses:** +- Microsoft Defender for Cloud Apps + +**Tenant Permissions:** +Requires GlobalAdmin, SecurityAdmin on the workspace's tenant + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Microsoft Defender for Cloud Apps to Microsoft Sentinel** + +In the Microsoft Defender for Cloud Apps portal, under Settings, select Security extensions and then SIEM and set Microsoft Sentinel as your SIEM agent. For more information, see [Microsoft Defender for Cloud Apps](https://aka.ms/azuresentinelmcas) . + +After you connect Microsoft Defender for Cloud Apps, the alerts and discovery logs are sent to this Microsoft Sentinel workspace.​ +**Select Microsoft Defender for Cloud Apps Data Types** + +In the Microsoft Sentinel portal, select which data types to enable: + +- ☐ **Alerts** +- ☐ **Cloud Discovery Logs (Preview)** + +Each data type may have specific licensing requirements. Review the information provided for each type in the portal before enabling. + +> 💡 **Portal-Only Feature**: Data type selection is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftcopilot.md b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftcopilot.md index 6036ab4baaf..3632a75d596 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftcopilot.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftcopilot.md @@ -10,4 +10,21 @@ The Microsoft Copilot logs connector in Microsoft Sentinel enables the seamless ingestion of Copilot-generated activity logs into Microsoft Sentinel for advanced threat detection, investigation, and response. 
It collects telemetry from Microsoft Copilot services - such as usage data, prompts and system responses - and ingests into Microsoft Sentinel, allowing security teams to monitor for misuse, detect anomalies, and maintain compliance with organizational policies. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **Tenant Permissions**: 'Security Administrator' or 'Global Administrator' on the workspace's tenant. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Microsoft Copilot audit logs to Microsoft Sentinel** + +This connector uses the Office Management API to get your Microsoft Copilot audit logs. The logs will be stored and processed in your existing Microsoft Sentinel workspace. You can find the data in the **LLMActivity** table. +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftdefenderadvancedthreatprotection.md b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftdefenderadvancedthreatprotection.md index cbfda037901..2b52b2f0f9d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftdefenderadvancedthreatprotection.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftdefenderadvancedthreatprotection.md @@ -10,4 +10,26 @@ Microsoft Defender for Endpoint is a security platform designed to prevent, detect, investigate, and respond to advanced threats. The platform creates alerts when suspicious security events are seen in an organization. Fetch alerts generated in Microsoft Defender for Endpoint to Microsoft Sentinel so that you can effectively analyze security events. 
You can create rules, build dashboards and author playbooks for immediate response. For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2220128&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Licenses:** +- Microsoft Defender for Endpoint + +**Tenant Permissions:** +Requires GlobalAdmin, SecurityAdmin on the workspace's tenant + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Microsoft Defender for Endpoint alerts to Microsoft Sentinel** + +> Connecting Microsoft Defender for Endpoint will cause your data that is collected by Microsoft Defender for Endpoint service to be stored and processed in the location that you have configured your Microsoft Sentinel workspace. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `MicrosoftDefenderATP`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. 
+ +ℹ️ Microsoft Defender for Endpoint Advanced Hunting raw logs are available as part of the Microsoft 365 Defender (Preview) connector + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftdefenderforcloudtenantbased.md b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftdefenderforcloudtenantbased.md index 62d33b69301..fab7ffa7c5f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftdefenderforcloudtenantbased.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftdefenderforcloudtenantbased.md @@ -10,4 +10,23 @@ Microsoft Defender for Cloud is a security management tool that allows you to detect and quickly respond to threats across Azure, hybrid, and multi-cloud workloads. This connector allows you to stream your MDC security alerts from Microsoft 365 Defender into Microsoft Sentinel, so you can leverage the advantages of XDR correlations connecting the dots across your cloud resources, devices and identities and view the data in workbooks, queries and investigate and respond to incidents. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269832&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Tenant Permissions:** +Requires SecurityAdmin, GlobalAdmin on the workspace's tenant + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Tenant-based Microsoft Defender for Cloud to Microsoft Sentinel** + +After connecting this connector, **all** your Microsoft Defender for Cloud subscriptions' alerts will be sent to this Microsoft Sentinel workspace. 
+ +> Your Microsoft Defender for Cloud alerts are connected to stream through the Microsoft 365 Defender. To benefit from automated grouping of the alerts into incidents, connect the Microsoft 365 Defender incidents connector. Incidents can be viewed in the incidents queue. +Tenant-based Microsoft Defender for Cloud + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftdefenderthreatintelligence.md b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftdefenderthreatintelligence.md index 4cb4587a9b1..5cf2c4fea7c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftdefenderthreatintelligence.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftdefenderthreatintelligence.md @@ -10,4 +10,17 @@ Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.** + +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `MicrosoftThreatIntelligence`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftpurviewinformationprotection.md b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftpurviewinformationprotection.md index c6294a7115a..d07df607286 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftpurviewinformationprotection.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftpurviewinformationprotection.md @@ -12,4 +12,21 @@ Microsoft Purview Information Protection helps you discover, classify, protect, Integrate Microsoft Purview Information Protection logs with Microsoft Sentinel to view dashboards, create custom alerts and improve investigation. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223811&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **License**: Enterprise Mobility + Security E5/A5 or Microsoft 365 E5/A5 or P2 + +**Tenant Permissions:** +Requires GlobalAdmin, SecurityAdmin on the workspace's tenant + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Connect Microsoft Purview Information Protection audit logs to Microsoft Sentinel** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftsysmonforlinux.md b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftsysmonforlinux.md index ca3146894ca..855347b51fe 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftsysmonforlinux.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftsysmonforlinux.md @@ -12,4 +12,51 @@ [Sysmon for linux link:]. The Sysmon for Linux connector uses [Syslog](https://aka.ms/sysLogInfo) as its data ingestion method. This solution depends on ASIM to work as expected. [Deploy ASIM](https://aka.ms/DeployASIM) to get the full value from the solution. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>This data connector depends on ASIM parsers based on a Kusto Functions to work as expected. [Deploy the parsers](https://aka.ms/ASimSysmonForLinuxARM) + + The following functions will be deployed: + + - vimFileEventLinuxSysmonFileCreated, vimFileEventLinuxSysmonFileDeleted + + - vimProcessCreateLinuxSysmon, vimProcessTerminateLinuxSysmon + + - vimNetworkSessionLinuxSysmon + +[Read more](https://aka.ms/AboutASIM) + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. 
+ - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. +- **Open Syslog settings** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftthreatprotection.md b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftthreatprotection.md index 0c1646d2102..a0ef5e449e1 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/microsoftthreatprotection.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/microsoftthreatprotection.md @@ -28,4 +28,27 @@ Microsoft Defender XDR suite includes: For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **License**: M365 E5, M365 A5 or any other Microsoft Defender XDR eligible license. + +**Tenant Permissions:** +Requires GlobalAdmin, SecurityAdmin on the workspace's tenant + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect incidents & alerts** + +Connect Microsoft Defender XDR incidents to your Microsoft Sentinel. Incidents will appear in the incidents queue. 
+> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `MicrosoftThreatProtection`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + +**2. Connect events** +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `MicrosoftDefenderATPEvents`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/mimecastatapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/mimecastatapi.md index b1510c31a07..79594fdba91 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/mimecastatapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/mimecastatapi.md @@ -22,4 +22,119 @@ The Mimecast products included within the connector are: +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **REST API Credentials/permissions**: See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Resource group** + +You need to have a resource group created with a subscription you are going to use. + +**2. Functions app** + +You need to have an Azure App registered for this connector to use +1. Application Id +2. Tenant Id +3. Client Id +4. Client Secret +5. Entra Object ID + +>**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. 
You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 2 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 3 - Get Object ID of your application in Microsoft Entra ID** + + After creating your app registration, follow the steps in this section to get Object ID: + 1. Go to **Microsoft Entra ID**. + 2. Select **Enterprise applications** from the left menu. + 3. Find your newly created application in the list (you can search by the name you provided). + 4. Click on the application. + 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment. 
+ +**STEP 4 - Deploy Mimecast API Connector** + +>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available. + +**7. Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Mimecast Awareness Training Data connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Region**. +3. Enter the below information : + + a. Location - The location in which the data collection rules and data collection endpoints should be deployed + + b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace + + c. AzureClientID - Enter Azure Client ID that you have created during app registration + + d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret + + e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory + + f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App + + g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com) + + h. MimecastClientID - Enter Mimecast Client ID for authentication + + i. MimecastClientSecret - Enter Mimecast Client Secret for authentication + + j. MimecastAwarenessPerformanceDetailsTableName - Enter name of the table used to store Awareness Performance Details data. Default is 'Awareness_Performance_Details' + + k. MimecastAwarenessUserDataTableName - Enter name of the table used to store Awareness User Data data. Default is 'Awareness_User_Data' + + l. MimecastAwarenessWatchlistDetailsTableName - Enter name of the table used to store Awareness Watchlist Details data. Default is 'Awareness_Watchlist_Details' + + m. 
MimecastAwarenessSafeScoreDetailsTableName - Enter name of the table used to store Awareness SafeScore Details data. Default is 'Awareness_SafeScore_Details' + + n. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted + + o. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes + + p. LogLevel - Please add log level or log severity value. By default it is set to INFO + + q. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 Febraury 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/mimecastauditapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/mimecastauditapi.md index a1a9d8238d8..badef859ab4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/mimecastauditapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/mimecastauditapi.md @@ -16,4 +16,106 @@ Audit +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**1. Configuration:** + +**STEP 1 - Configuration steps for the Mimecast API** + +Go to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later) + +**STEP 2 - Deploy Mimecast API Connector** + +>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available. 
+ +**STEP 3 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 4 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. 
+ +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 5 - Get Object ID of your application in Microsoft Entra ID** + + After creating your app registration, follow the steps in this section to get Object ID: + 1. Go to **Microsoft Entra ID**. + 2. Select **Enterprise applications** from the left menu. + 3. Find your newly created application in the list (you can search by the name you provided). + 4. Click on the application. + 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment. + +**6. Deploy the Mimecast Audit Data Connector:** + +Use this method for automated deployment of the Mimecast Audit Data connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAuditAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAuditAzureDeploy-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Region**. +3. Enter the below information : + + a. Location - The location in which the data collection rules and data collection endpoints should be deployed + + b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace + + c. AzureClientID - Enter Azure Client ID that you have created during app registration + + d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret + + e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory + + f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App + + g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com) + + h. 
MimecastClientID - Enter Mimecast Client ID for authentication + + i. MimecastClientSecret - Enter Mimecast Client Secret for authentication + + j. MimecastAuditTableName - Enter name of the table used to store Audit data. Default is 'Audit' + + k. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted + + l. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes + + m. LogLevel - Please add log level or log severity value. By default it is set to INFO + + n. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 Febraury 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/mimecastciapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/mimecastciapi.md index 0dca44dd04f..9148cfd6b66 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/mimecastciapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/mimecastciapi.md @@ -10,4 +10,118 @@ The data connector for [Mimecast Cloud Integrated](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Cloud Integrated inspection technologies within Microsoft Sentinel. 
The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Resource group** + +You need to have a resource group created with a subscription you are going to use. + +**2. Functions app** + +You need to have an Azure App registered for this connector to use +1. Application Id +2. Tenant Id +3. Client Id +4. Client Secret + +>**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**3. Configuration:** + +**STEP 1 - Configuration steps for the Mimecast API** + +Go to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later) + +**STEP 2 - Deploy Mimecast API Connector** + +>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available. + +**STEP 3 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. 
+ +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 4 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 5 - Get Object ID of your application in Microsoft Entra ID** + + After creating your app registration, follow the steps in this section to get Object ID: + 1. Go to **Microsoft Entra ID**. + 2. Select **Enterprise applications** from the left menu. + 3. Find your newly created application in the list (you can search by the name you provided). + 4. Click on the application. + 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment. + +**8. Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Mimecast Cloud Integrated Data connector. + +1. Click the **Deploy to Azure** button below. 
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastCI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastCI-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Region**. +3. Enter the below information : + + a. Location - The location in which the data collection rules and data collection endpoints should be deployed + + b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace + + c. AzureClientID - Enter Azure Client ID that you have created during app registration + + d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret + + e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory + + f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App + + g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com) + + h. MimecastClientID - Enter Mimecast Client ID for authentication + + i. MimecastClientSecret - Enter Mimecast Client Secret for authentication + + j. MimecastCITableName - Enter name of the table used to store Cloud Integrated data. Default is 'Cloud_Integrated' + + k. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted + + l. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes + + m. LogLevel - Please add log level or log severity value. By default it is set to INFO + + n. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 Febraury 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. 
This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/mimecastsegapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/mimecastsegapi.md index 3e7b30b1f0b..4b7030a2701 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/mimecastsegapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/mimecastsegapi.md @@ -16,4 +16,108 @@ The data connector for [Mimecast Secure Email Gateway](https://integrations.mime +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**1. Configuration:** + +**STEP 1 - Configuration steps for the Mimecast API** + +Go to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later) + +**STEP 2 - Deploy Mimecast API Connector** + +>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available. + +**STEP 3 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID are required as configuration parameters for the execution of Mimecast Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 4 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 5 - Get Object ID of your application in Microsoft Entra ID** + + After creating your app registration, follow the steps in this section to get Object ID: + 1. Go to **Microsoft Entra ID**. + 2. Select **Enterprise applications** from the left menu. + 3. Find your newly created application in the list (you can search by the name you provided). + 4. Click on the application. + 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment. + +**6. 
Deploy the Mimecast Secure Email Gateway Data Connector:** + +Use this method for automated deployment of the Mimecast Secure Email Gateway Data connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastSEGAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastSEGAzureDeploy-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Region**. +3. Enter the below information : + + a. Location - The location in which the data collection rules and data collection endpoints should be deployed + + b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace + + c. AzureClientID - Enter Azure Client ID that you have created during app registration + + d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret + + e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory + + f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App + + g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com) + + h. MimecastClientID - Enter Mimecast Client ID for authentication + + i. MimecastClientSecret - Enter Mimecast Client Secret for authentication + + j. MimecastCGTableName - Enter name of the table used to store CG data. Default is 'Seg_Cg' + + k. MimecastDLPTableName - Enter name of the table used to store DLP data. Default is 'Seg_Dlp' + + l. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted + + m. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes + + n. LogLevel - Please add log level or log severity value. 
By default it is set to INFO + + o. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/mimecastsiemapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/mimecastsiemapi.md index 6a4157cbc80..6ac434c8604 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/mimecastsiemapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/mimecastsiemapi.md @@ -16,4 +16,83 @@ The data connector for [Mimecast Secure Email Gateway](https://integrations.mime +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **Mimecast API credentials**: You need to have the following pieces of information to configure the integration: +- mimecastEmail: Email address of a dedicated Mimecast admin user +- mimecastPassword: Password for the dedicated Mimecast admin user +- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast +- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast +- mimecastAccessKey: Access Key for the dedicated Mimecast admin user +- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user +- mimecastBaseURL: Mimecast Regional API Base URL + +> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations. + +> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/ +- **Resource group**: You need to have a resource group created with a subscription you are going to use. +- **Functions app**: You need to have an Azure App registered for this connector to use +1. Application Id +2. Tenant Id +3. Client Id +4. Client Secret + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**1. Configuration:** + +**STEP 1 - Configuration steps for the Mimecast API** + +Go to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later) + +**STEP 2 - Deploy Mimecast API Connector** + +>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Deploy the Mimecast Secure Email Gateway Data Connector:** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastSEG-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. 
Enter the following fields: + - appName: Unique string that will be used as id for the app in Azure platform + - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID + - appInsightsLocation(default): westeurope + - mimecastEmail: Email address of dedicated user for this integraion + - mimecastPassword: Password for dedicated user + - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast + - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast + - mimecastAccessKey: Access Key for the dedicated Mimecast user + - mimecastSecretKey: Secret Key for dedicated Mimecast user + - mimecastBaseURL: Regional Mimecast API Base URL + - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID + - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret] + - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) + - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) + - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID + + >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. + +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> SIEM checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt, dlp-checkpoint.txt and select it for upload (this is done so that date_range for SIEM logs is stored in consistent state) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/mimecasttiregionalconnectorazurefunctions.md b/Tools/Solutions Analyzer/connector-docs/connectors/mimecasttiregionalconnectorazurefunctions.md index 8c54e565c9a..551b683ca9d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/mimecasttiregionalconnectorazurefunctions.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/mimecasttiregionalconnectorazurefunctions.md @@ -18,4 +18,87 @@ Mimecast products and features required: +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **Mimecast API credentials**: You need to have the following pieces of information to configure the integration: +- mimecastEmail: Email address of a dedicated Mimecast admin user +- mimecastPassword: Password for the dedicated Mimecast admin user +- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast +- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast +- mimecastAccessKey: Access Key for the dedicated Mimecast admin user +- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user +- mimecastBaseURL: Mimecast Regional API Base URL + +> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations. + +> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/ +- **Resource group**: You need to have a resource group created with a subscription you are going to use. +- **Functions app**: You need to have an Azure App registered for this connector to use +1. Application Id +2. Tenant Id +3. Client Id +4. Client Secret + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**1. Configuration:** + +**STEP 1 - Configuration steps for the Mimecast API** + +Go to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later) + +**STEP 2 - Deploy Mimecast API Connector** + +>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Enable Mimecast Intelligence for Microsoft - Microsoft Sentinel Connector:** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTIRegional-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. 
Enter the following fields: + - appName: Unique string that will be used as id for the app in Azure platform + - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID + - appInsightsLocation(default): westeurope + - mimecastEmail: Email address of dedicated user for this integraion + - mimecastPassword: Password for dedicated user + - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast + - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast + - mimecastAccessKey: Access Key for the dedicated Mimecast user + - mimecastSecretKey: Secret Key for dedicated Mimecast user + - mimecastBaseURL: Regional Mimecast API Base URL + - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID + - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret] + - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) + - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) + - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID + + >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. + +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TIR checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt and select it for upload (this is done so that date_range for TIR logs is stored in consistent state) + +**4. Additional configuration:** + +>Connect to a **Threat Intelligence Platforms** Data Connector. Follow instructions on the connector page and then click connect button. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/mimecastttpapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/mimecastttpapi.md index 201e2b7c465..32263399126 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/mimecastttpapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/mimecastttpapi.md @@ -20,4 +20,116 @@ The Mimecast products included within the connector are: +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **REST API Credentials/permissions**: See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Resource group** + +You need to have a resource group created with a subscription you are going to use. + +**2. Functions app** + +You need to have an Azure App registered for this connector to use +1. Application Id +2. Tenant Id +3. Client Id +4. Client Secret + +>**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. 
You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 2 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 3 - Get Object ID of your application in Microsoft Entra ID** + + After creating your app registration, follow the steps in this section to get Object ID: + 1. Go to **Microsoft Entra ID**. + 2. Select **Enterprise applications** from the left menu. + 3. Find your newly created application in the list (you can search by the name you provided). + 4. Click on the application. + 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment. 
+ +**STEP 4 - Deploy Mimecast API Connector** + +>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available. + +**7. Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Mimecast Targeted Threat Protection Data connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Region**. +3. Enter the below information : + + a. Location - The location in which the data collection rules and data collection endpoints should be deployed + + b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace + + c. AzureClientID - Enter Azure Client ID that you have created during app registration + + d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret + + e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory + + f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App + + g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com) + + h. MimecastClientID - Enter Mimecast Client ID for authentication + + i. MimecastClientSecret - Enter Mimecast Client Secret for authentication + + j. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted + + k. MimecastTTPAttachmentTableName - Enter name of the table used to store TTP Attachment data. Default is 'Ttp_Attachment' + + l. MimecastTTPImpersonationTableName - Enter name of the table used to store TTP Impersonation data. 
Default is 'Ttp_Impersonation' + + m. MimecastTTPUrlTableName - Enter name of the table used to store TTP Url data. Default is 'Ttp_Url' + + n. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes + + o. LogLevel - Please add log level or log severity value. By default it is set to INFO + + p. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/misp2sentinelconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/misp2sentinelconnector.md index 796c6a465e0..78137fcb35a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/misp2sentinelconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/misp2sentinelconnector.md @@ -10,4 +10,19 @@ This solution installs the MISP2Sentinel connector that allows you to automatically push threat indicators from MISP to Microsoft Sentinel via the Upload Indicators REST API. After installing the solution, configure and enable this data connector by following guidance in Manage solution view. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Installation and setup instructions** + +Use the documentation from this GitHub repository to install and configure the MISP to Microsoft Sentinel connector: + +https://github.com/cudeso/misp2sentinel + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/mongodb.md b/Tools/Solutions Analyzer/connector-docs/connectors/mongodb.md index 4c178ce7d42..174ff35d758 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/mongodb.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/mongodb.md @@ -10,4 +10,72 @@ MongoDB data connector provides the capability to ingest [MongoDBAudit](https://www.mongodb.com/) into Microsoft Sentinel. Refer to [MongoDB documentation](https://www.mongodb.com/docs/manual/tutorial/getting-started/) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias MongoDBAudit and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAudit/Parsers/MongoDBAudit.txt). On the second line of the query, enter the hostname(s) of your MongoDBAudit device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the MongoDB Server where the logs are generated. + +> Logs from MongoDB Enterprise Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure MongoDBAudit to write logs to files** + +Edit mongod.conf file (for Linux) or mongod.cfg (for Windows) to write logs to files: + +>**dbPath**: data/db + +>**path**: data/db/auditLog.json + +Set the following parameters: **dbPath** and **path**. Refer to the [MongoDB documentation for more details](https://www.mongodb.com/docs/manual/tutorial/configure-auditing/) + +**3. 
Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. From the left pane, select **Settings**, select **Custom Logs** and click **+Add custom log** +3. Click **Browse** to upload a sample of a MongoDBAudit log file. Then, click **Next >** +4. Select **Timestamp** as the record delimiter and click **Next >** +5. Select **Windows** or **Linux** and enter the path to MongoDBAudit logs based on your configuration +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **MongoDBAudit** as the custom log Name (the '_CL' suffix will be added automatically) and click **Done**. + +**3. Validate connectivity** + +It may take upwards of 20 minutes until your logs start to appear in Microsoft Sentinel. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/mongodbatlaslogsazurefunctions.md b/Tools/Solutions Analyzer/connector-docs/connectors/mongodbatlaslogsazurefunctions.md index 5d6df49cf1a..9d243d07d71 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/mongodbatlaslogsazurefunctions.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/mongodbatlaslogsazurefunctions.md @@ -10,4 +10,50 @@ The [MongoDBAtlas](https://www.mongodb.com/products/platform/atlas-database) Logs connector gives the capability to upload MongoDB Atlas database logs into Microsoft Sentinel through the MongoDB Atlas Administration API. Refer to the [API documentation](https://www.mongodb.com/docs/api/doc/atlas-admin-api-v2/) for more information. The connector provides the ability to get a range of database log messages for the specified hosts and specified project. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: MongoDB Atlas service account **Client ID** and **Client Secret** are required. [See the documentation to learn more about creating a service account](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-an-organization) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to 'MongoDB Atlas' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>Ensure the workspace is added to Microsoft Sentinel before deploying the connector. + +**1. STEP 1 - Configuration steps for the 'MongoDB Atlas Administration API'** + +1. 
[Follow these instructions](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-an-organization) to create a MongoDB Atlas service account. +2. Copy the **Client ID** and **Client Secret** you created, also the **Group ID** (Project) and each **Cluster ID** (Hostname) required for later steps. +3. Refer [MongoDB Atlas API documentation](https://www.mongodb.com/docs/api/doc/atlas-admin-api-v2/operation/operation-downloadgroupclusterlog) for more details. +4. The client secret can be passed into the connector via an Azure key vault or directly into the connector. +5. If you want to use the key vault option create a key vault, using a Vault Access Policy, with a secret named **mongodb-client-secret** and your client secret saved as the secret value. + +**2. STEP 2 - Deploy the 'MongoDB Atlas Logs' connector and the associated Azure Function** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#view/Microsoft_Azure_CreateUIDef/CustomDeploymentBlade/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FMongoDBAtlas%2FData%20Connectors%2FMongoDBAtlasLogs%2Fazuredeploy_Connector_MongoDBAtlasLogs_AzureFunction.json/uiFormDefinitionUri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FMongoDBAtlas%2FData%20Connectors%2FMongoDBAtlasLogs%2FcreateUiDef.json) + +**3. STEP 3 - Set the connector parameters** + +1. Select the preferred **Subscription** and an existing **Resource Group**. +2. Enter an existing **Log Analytics Workspace Resource ID** belonging to the resource group. +3. Click **Next** +4. Enter the **MongoDB Group ID**, a list of up to 10 **MongoDB Cluster IDs**, each on a separate line, and **MongoDB Client ID**. +5. Choose for **Authentication Method** either **Client Secret** and copy in your client secret value or **Key Vault** and copy in the name of your key vault. 
+Click **Next** +6. Review the MongoDB filters. Select logs from at least one category. Click **Next** +7. Review the schedule. Click **Next** +8. Review the settings then click **Create**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/morphisecccf.md b/Tools/Solutions Analyzer/connector-docs/connectors/morphisecccf.md index 9f91ee18402..b3fdb3d47d4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/morphisecccf.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/morphisecccf.md @@ -14,4 +14,23 @@ This solution provides more than just data ingestion; it equips your security te With this solution, you can empower your SOC to leverage Morphisec's powerful threat prevention within a unified investigation and response workflow in Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure Morphisec Connector** + +1. Create an API key client in Morphisec Console with read permissions to fetch alerts. +2. Provide the Client ID and Client Secret in the connector configuration. 
+- **Morphisec Base URL**: https://.morphisec.cloud +- **Client ID**: Enter the Client ID +- **Client Secret**: (password field) +- **Tenant ID**: Enter your Morphisec Tenant ID +- Click 'Connect to Morphisec' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/mulesoft.md b/Tools/Solutions Analyzer/connector-docs/connectors/mulesoft.md index bd53883c196..d307d5ef899 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/mulesoft.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/mulesoft.md @@ -10,4 +10,100 @@ The [MuleSoft Cloudhub](https://www.mulesoft.com/platform/saas/cloudhub-ipaas-cloud-based-integration) data connector provides the capability to retrieve logs from Cloudhub applications using the Cloudhub API and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **MuleSoftEnvId**, **MuleSoftAppName**, **MuleSoftUsername** and **MuleSoftPassword** are required for making API calls. 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**MuleSoftCloudhub**](https://aka.ms/sentinel-MuleSoftCloudhub-parser) which is deployed with the Microsoft Sentinel Solution. + +**Note: This data connector fetch only the logs of the CloudHub application using Platform API and not of CloudHub 2.0 application** + +**STEP 1 - Configuration steps for the MuleSoft Cloudhub API** + + Follow the instructions to obtain the credentials. + +1. Obtain the **MuleSoftEnvId**, **MuleSoftAppName**, **MuleSoftUsername** and **MuleSoftPassword** using the [documentation](https://help.mulesoft.com/s/article/How-to-get-Cloudhub-application-information-using-Anypoint-Platform-API). +2. Save credentials for using in the data connector. 
+ +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the MuleSoft Cloudhub data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the MuleSoft Cloudhub data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MuleSoftCloudhubAPI-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **MuleSoftEnvId**, **MuleSoftAppName**, **MuleSoftUsername** and **MuleSoftPassword** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the MuleSoft Cloudhub data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-MuleSoftCloudhubAPI-functionapp) file. 
Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. MuleSoftXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select ** New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + MuleSoftEnvId + MuleSoftAppName + MuleSoftUsername + MuleSoftPassword + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/nasuniedgeappliance.md b/Tools/Solutions Analyzer/connector-docs/connectors/nasuniedgeappliance.md index 52ae96d34bd..d1b106fa993 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/nasuniedgeappliance.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/nasuniedgeappliance.md @@ -10,4 +10,44 @@ The [Nasuni](https://www.nasuni.com/) connector allows you to easily connect your Nasuni Edge Appliance Notifications and file system audit logs with Microsoft Sentinel. This gives you more insight into activity within your Nasuni infrastructure and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. 
Configure the logs to be collected** + +Follow the configuration steps below to configure your Linux machine to send Nasuni event information to Microsoft Sentinel. Refer to the [Azure Monitor Agent documenation](https://learn.microsoft.com/en-us/azure/azure-monitor/agents/agents-overview) for additional details on these steps. +Configure the facilities you want to collect and their severities. +1. Select the link below to open your workspace agents configuration, and select the Syslog tab. +2. Select Add facility and choose from the drop-down list of facilities. Repeat for all the facilities you want to add. +3. Mark the check boxes for the desired severities for each facility. +4. Click Apply. +- **Open Syslog settings** + +**3. Configure Nasuni Edge Appliance settings** + +Follow the instructions in the [Nasuni Management Console Guide](https://view.highspot.com/viewer/629a633ae5b4caaf17018daa?iid=5e6fbfcbc7143309f69fcfcf) to configure Nasuni Edge Appliances to forward syslog events. Use the IP address or hostname of the Linux device running the Azure Monitor Agent in the Servers configuration field for the syslog settings. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/netclean-proactive-incidents.md b/Tools/Solutions Analyzer/connector-docs/connectors/netclean-proactive-incidents.md index a795fc8b03e..c5e0039a1f0 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/netclean-proactive-incidents.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/netclean-proactive-incidents.md @@ -10,4 +10,39 @@ This connector uses the Netclean Webhook (required) and Logic Apps to push data into Microsoft Sentinel Log Analytics +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** NetClean ProActive uses a Webhook to expose incident data, Azure Logic Apps is used to receive and push data to Log Analytics. This might result in additional data ingestion costs. + It's possible to test this without Logic Apps or NetClean ProActive; see option 2 +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**1. Option 1: Logic app** + +1. Create a new logic app + Use When a HTTP request is received as the Trigger and save it. It will now have generated a URL that can be used in the ProActive webconsole configuration. + Add an action: + Select the Azure Log Analytics Data Collector and choose Send Data + Enter Connection Name, Workspace ID and Workspace Key, you will find the information needed in your Log Analytics workspace under Settings-->Agents-->Log Analytics agent instructions. + In JSON Request body add @triggerBody(). in Custom Log Name add Netclean_Incidents. + +**2. Option 2 (Testing only)** + +Ingest data using an API function. Please use the script found on + https://learn.microsoft.com/en-us/azure/azure-monitor/logs/data-collector-api?tabs=powershell +Replace the CustomerId and SharedKey values with your values +Replace the content in $json variable to the sample data found here: https://github.com/Azure/Azure-Sentinel/blob/master/Sample%20Data/Custom/Netclean_Incidents_CL.json .
+Set the LogType varible to **Netclean_Incidents_CL** +Run the script + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/netskope.md b/Tools/Solutions Analyzer/connector-docs/connectors/netskope.md index 70344c55076..e474806d900 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/netskope.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/netskope.md @@ -10,4 +10,93 @@ The [Netskope Cloud Security Platform](https://www.netskope.com/platform) connector provides the capability to ingest Netskope logs and events into Microsoft Sentinel. The connector provides visibility into Netskope Platform Events and Alerts in Microsoft Sentinel to improve monitoring and investigation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Netskope API Token**: A Netskope API Token is required. [See the documentation to learn more about Netskope API](https://innovatechcloud.goskope.com/docs/Netskope_Help/en/rest-api-v1-overview.html). **Note:** A Netskope account is required + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This connector uses Azure Functions to connect to Netskope to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Netskope and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskope/Parsers/Netskope.txt), on the second line of the query, enter the hostname(s) of your Netskope device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Netskope API** + + [Follow these instructions](https://docs.netskope.com/en/rest-api-v1-overview.html) provided by Netskope to obtain an API Token. **Note:** A Netskope account is required + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Netskope connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Netskope API Authorization Token, readily available. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +This method provides an automated deployment of the Netskope connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-netskope-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **API Key**, and **URI**. + - Use the following schema for the `uri` value: `https://.goskope.com` Replace `` with your domain. + - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. + - The default **Log Types** is set to pull all 6 available log types (`alert, page, application, audit, infrastructure, network`), remove any are not required. + - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. +6. After successfully deploying the connector, download the Kusto Function to normalize the data fields. [Follow the steps](https://aka.ms/sentinelgithubparsersnetskope) to use the Kusto function alias, **Netskope**. + +**4. 
Option 2 - Manual Deployment of Azure Functions** + +This method provides the step-by-step instructions to deploy the Netskope connector manually with Azure Function. + +**1. Create a Function App** + +1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**. +2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. +3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected. +4. Make other preferable configuration changes, if needed, then click **Create**. + +**2. Import Function App Code** + +1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**. +2. Select **Timer Trigger**. +3. Enter a unique Function **Name** and modify the cron schedule, if needed. The default value is set to run the Function App every 5 minutes. (Note: the Timer trigger should match the `timeInterval` value below to prevent overlapping data), click **Create**. +4. Click on **Code + Test** on the left pane. +5. Copy the [Function App Code](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Netskope/Data%20Connectors/Netskope/AzureFunctionNetskope/run.ps1) and paste into the Function App `run.ps1` editor. +6. Click **Save**. + +**3. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following seven (7) application settings individually, with their respective string values (case-sensitive): + apikey + workspaceID + workspaceKey + uri + timeInterval + logTypes + logAnalyticsUri (optional) +> - Enter the URI that corresponds to your region.
The `uri` value must follow the following schema: `https://.goskope.com` - There is no need to add subsequent parameters to the Uri, the Function App will dynamically append the parameters in the proper format. +> - Set the `timeInterval` (in minutes) to the default value of `5` to correspond to the default Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion. +> - Set the `logTypes` to `alert, page, application, audit, infrastructure, network` - This list represents all the available log types. Select the log types based on logging requirements, separating each by a single comma. +> - Note: If using Azure Key Vault, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. +5. After successfully deploying the connector, download the Kusto Function to normalize the data fields. [Follow the steps](https://aka.ms/sentinelgithubparsersnetskope) to use the Kusto function alias, **Netskope**.
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/netskopealertsevents.md b/Tools/Solutions Analyzer/connector-docs/connectors/netskopealertsevents.md index 4cdd3d96d96..e7515e89bd7 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/netskopealertsevents.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/netskopealertsevents.md @@ -10,4 +10,100 @@ Netskope Security Alerts and Events +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Netskope organisation url**: The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal. +- **Netskope API key**: The Netskope data connector requires you to provide a valid API key. You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. STEP 1 - Create a Netskope API key.** + +Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step. + +**2. 
STEP 2 - Enter your Netskope product Details** + +Enter your Netskope organisation url & API Token below: +- **Organisation Url**: Enter your organisation url +- **API Key**: (password field) +- **Netskope Alerts Remediation** (select) + - Yes + - No +- **Netskope Alerts Uba** (select) + - Yes + - No +- **Netskope Alerts Security Assessment** (select) + - Yes + - No +- **Netskope Alerts Quarantine** (select) + - Yes + - No +- **Netskope Alerts Policy** (select) + - Yes + - No +- **Netskope Alerts Malware** (select) + - Yes + - No +- **Netskope Alerts Malsite** (select) + - Yes + - No +- **Netskope Alerts DLP** (select) + - Yes + - No +- **Netskope Alerts CTEP** (select) + - Yes + - No +- **Netskope Alerts Watchlist** (select) + - Yes + - No +- **Netskope Alerts Compromised Credentials** (select) + - Yes + - No +- **Netskope Alerts Content** (select) + - Yes + - No +- **Netskope Alerts Device** (select) + - Yes + - No +- **Netskope Events Application** (select) + - Yes + - No +- **Netskope Events Audit** (select) + - Yes + - No +- **Netskope Events Connection** (select) + - Yes + - No +- **Netskope Events DLP** (select) + - Yes + - No +- **Netskope Events Endpoint** (select) + - Yes + - No +- **Netskope Events Infrastructure** (select) + - Yes + - No +- **Netskope Events Network** (select) + - Yes + - No +- **Netskope Events Page** (select) + - Yes + - No +**OPTIONAL: Specify the Index the API uses.** + + **Configuring the index is optional and only required in advanced scenario's.** + Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index. + - **Index**: NetskopeCCP + +**3. STEP 3 - Click Connect** + +Verify all fields above were filled in correctly. 
Press the Connect button to connect Netskope to Microsoft Sentinel. +- Click 'connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/netskopedataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/netskopedataconnector.md index 2224b8841d7..e932cb1b8e6 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/netskopedataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/netskopedataconnector.md @@ -40,4 +40,92 @@ The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api > https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview + +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Netskope Tenant** and **Netskope API Token** is required. See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 2 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret: + 1. 
In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID** + + Follow the steps in this section to assign the role: + 1. In the Azure portal, Go to **Resource Group** and select your resource group. + 2. Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. Select **Contributor** as role and click on next. + 5. In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 4 - Steps to create/get Credentials for the Netskope account** + + Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**: + 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar. + 2. Click on Tools and then **REST API v2** + 3. Now, click on the new token button. 
Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from. + 4. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage. + +**STEP 5 - Steps to create the Azure Functions for Netskope Alerts and Events Data Collection** + +>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Netskope API Authorization Key(s). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information: + Netskope HostName + Netskope API Token + Select Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events + Log Level + Workspace ID + Workspace Key +4. Click on **Review+Create**. +5. Then after validation click on **Create** to deploy. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/netskopewebtransactionsdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/netskopewebtransactionsdataconnector.md index f0b02e02656..61f95ee4b1d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/netskopewebtransactionsdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/netskopewebtransactionsdataconnector.md @@ -22,4 +22,103 @@ The [Netskope Web Transactions](https://docs.netskope.com/en/netskope-help/data- +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group. +- **Microsoft.Compute permissions**: Read and write permissions to Azure VMs is required. [See the documentation to learn more about Azure VMs](https://learn.microsoft.com/azure/virtual-machines/overview). +- **TransactionEvents Credentials and Permissions**: **Netskope Tenant** and **Netskope API Token** is required. [See the documentation to learn more about Transaction Events.](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/) +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector provides the functionality of ingesting Netskope Web Transactions data using a docker image to be deployed on a virtual machine (Either Azure VM/On Premise VM). Check the [Azure VM pricing page](https://azure.microsoft.com/pricing/details/virtual-machines/linux) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Steps to create/get Credentials for the Netskope account** + + Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**: + 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar. + 2. Click on Tools and then **REST API v2** + 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from. + 4. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage. + +**STEP 2 - Choose one from the following two deployment options to deploy the docker based data connector to ingest Netskope Web Transactions data** + +>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Netskope API Authorization Key(s) [Make sure the token has permissions for transaction events]. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Using Azure Resource Manager (ARM) Template to deploy VM [Recommended]** + +Using the ARM template deploy an Azure VM, install the prerequisites and start execution. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2WebTransactions-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Docker Image Name (mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions) + Netskope HostName + Netskope API Token + Seek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) + Workspace ID + Workspace Key + Backoff Retry Count (The retry count for token related errors before restarting the execution.) + Backoff Sleep Time (Number of seconds to sleep before retrying) + Idle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution) + VM Name + Authentication Type + Admin Password or Key + DNS Label Prefix + Ubuntu OS Version + Location + VM Size + Subnet Name + Network Security Group Name + Security Type +4. Click on **Review+Create**. +5. Then after validation click on **Create** to deploy. + +**4. Option 2 - Manual Deployment on previously created virtual machine** + +Use the following step-by-step instructions to deploy the docker based data connector manually on a previously created virtual machine. + +**1. Install docker and pull docker Image** + +>**NOTE:** Make sure that the VM is linux based (preferably Ubuntu). + +1. 
Firstly you will need to [SSH into the virtual machine](https://learn.microsoft.com/azure/virtual-machines/linux-vm-connect?tabs=Linux). +2. Now install [docker engine](https://docs.docker.com/engine/install/). +3. Now pull the docker image from docker hub using the command: 'sudo docker pull mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'. +4. Now to run the docker image use the command: 'sudo docker run -it -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'. You can replace mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions with the image id. Here docker_persistent_volume is the name of the folder that would be created on the vm in which the files will get stored. + +**2. Configure the Parameters** + +1. Once the docker image is running it will ask for the required parameters. +2. Add each of the following application settings individually, with their respective values (case-sensitive): + Netskope HostName + Netskope API Token + Seek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) + Workspace ID + Workspace Key + Backoff Retry Count (The retry count for token related errors before restarting the execution.) + Backoff Sleep Time (Number of seconds to sleep before retrying) + Idle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution) +3. Now the execution has started but is in interactive mode, so that shell cannot be stopped. To run it as a background process, stop the current execution by pressing Ctrl+C and then use the command: 'sudo docker run -d -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'. + +**3. Stop the docker container** + +1. Use the command 'sudo docker container ps' to list the running docker containers. Note down your container id. +2. 
Now stop the container using the command: 'sudo docker stop <container-id>'. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/netwrix.md b/Tools/Solutions Analyzer/connector-docs/connectors/netwrix.md index 1cd2afeaa4a..4dc923b8016 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/netwrix.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/netwrix.md @@ -10,4 +10,59 @@ Netwrix Auditor data connector provides the capability to ingest [Netwrix Auditor (formerly Stealthbits Privileged Activity Manager)](https://www.netwrix.com/auditor.html) events into Microsoft Sentinel. Refer to [Netwrix documentation](https://helpcenter.netwrix.com/) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on NetwrixAuditor parser based on a Kusto Function to work as expected. This parser is installed along with solution installation. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. 
+ +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Configure Netwrix Auditor to send logs using CEF** + +[Follow the instructions](https://www.netwrix.com/download/QuickStart/Netwrix_Auditor_Add-on_for_HPE_ArcSight_Quick_Start_Guide.pdf) to configure event export from Netwrix Auditor. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. 
You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/netwrixama.md b/Tools/Solutions Analyzer/connector-docs/connectors/netwrixama.md index 94e5b11cbea..57e01eca3fa 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/netwrixama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/netwrixama.md @@ -10,4 +10,59 @@ Netwrix Auditor data connector provides the capability to ingest [Netwrix Auditor (formerly Stealthbits Privileged Activity Manager)](https://www.netwrix.com/auditor.html) events into Microsoft Sentinel. Refer to [Netwrix documentation](https://helpcenter.netwrix.com/) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on NetwrixAuditor parser based on a Kusto Function to work as expected. This parser is installed along with solution installation. +**1. 
Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Configure Netwrix Auditor to send logs using CEF** + + [Follow the instructions](https://www.netwrix.com/download/QuickStart/Netwrix_Auditor_Add-on_for_HPE_ArcSight_Quick_Start_Guide.pdf) to configure event export from Netwrix Auditor. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/nginxhttpserver.md b/Tools/Solutions Analyzer/connector-docs/connectors/nginxhttpserver.md index 13413e8d7c0..9190024b749 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/nginxhttpserver.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/nginxhttpserver.md @@ -10,4 +10,59 @@ The NGINX HTTP Server data connector provides the capability to ingest [NGINX](https://nginx.org/en/) HTTP Server events into Microsoft Sentinel. Refer to [NGINX Logs documentation](https://nginx.org/en/docs/http/ngx_http_log_module.html) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias NGINXHTTPServer and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NGINX%20HTTP%20Server/Parsers/NGINXHTTPServer.txt).The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the NGINX HTTP Server where the logs are generated. + +> Logs from NGINX HTTP Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. From the left pane, select **Data**, select **Custom Logs** and click **Add+** +3. Click **Browse** to upload a sample of a NGINX HTTP Server log file (e.g. access.log or error.log). Then, click **Next >** +4. Select **New line** as the record delimiter and click **Next >** +5. 
Select **Windows** or **Linux** and enter the path to NGINX HTTP logs based on your configuration. Example: + - **Linux** Directory: '/var/log/nginx/*.log' +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **NGINX_CL** as the custom log Name and click **Done** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/nonamesecuritymicrosoftsentinel.md b/Tools/Solutions Analyzer/connector-docs/connectors/nonamesecuritymicrosoftsentinel.md index 88f67e54922..e89b98ceabc 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/nonamesecuritymicrosoftsentinel.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/nonamesecuritymicrosoftsentinel.md @@ -10,4 +10,22 @@ Noname Security solution to POST data into a Microsoft Sentinel SIEM workspace via the Azure Monitor REST API +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure the Noname Sentinel integration.** + +Configure the Sentinel workflow in the Noname integrations settings. 
Find documentation at https://docs.nonamesecurity.com +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/nordpass.md b/Tools/Solutions Analyzer/connector-docs/connectors/nordpass.md index 3207a6568d0..4a0363c28f4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/nordpass.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/nordpass.md @@ -10,4 +10,22 @@ Integrating NordPass with Microsoft Sentinel SIEM via the API will allow you to automatically transfer Activity Log data from NordPass to Microsoft Sentinel and get real-time insights, such as item activity, all login attempts, and security notifications. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +To proceed with the Microsoft Sentinel setup + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Nordpass-azuredeploy) +2. 
**Please note that after the successful deployment, the system pulls Activity Log data every 1 minute by default.** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/nozominetworksn2os.md b/Tools/Solutions Analyzer/connector-docs/connectors/nozominetworksn2os.md index a72af3bebe7..70acab36124 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/nozominetworksn2os.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/nozominetworksn2os.md @@ -10,4 +10,65 @@ The [Nozomi Networks](https://www.nozominetworks.com/) data connector provides the capability to ingest Nozomi Networks Events into Microsoft Sentinel. Refer to the Nozomi Networks [PDF documentation](https://www.nozominetworks.com/resources/data-sheets-brochures-learning-guides/) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**NozomiNetworksEvents**](https://aka.ms/sentinel-NozomiNetworks-parser) which is deployed with the Microsoft Sentinel Solution. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. 
+ +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Follow these steps to configure Nozomi Networks device for sending Alerts, Audit Logs, Health Logs log via syslog in CEF format: + +> 1. Log in to the Guardian console. + +> 2. Navigate to Administration->Data Integration, press +Add and select the Common Event Format (CEF) from the drop down + +> 3. Create New Endpoint using the appropriate host information and enable Alerts, Audit Logs, Health Logs for sending. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. 
You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/nozominetworksn2osama.md b/Tools/Solutions Analyzer/connector-docs/connectors/nozominetworksn2osama.md index 97f3de63fbd..b02279af2c2 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/nozominetworksn2osama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/nozominetworksn2osama.md @@ -10,4 +10,65 @@ The [Nozomi Networks](https://www.nozominetworks.com/) data connector provides the capability to ingest Nozomi Networks Events into Microsoft Sentinel. Refer to the Nozomi Networks [PDF documentation](https://www.nozominetworks.com/resources/data-sheets-brochures-learning-guides/) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**NozomiNetworksEvents**](https://aka.ms/sentinel-NozomiNetworks-parser) which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Follow these steps to configure Nozomi Networks device for sending Alerts, Audit Logs, Health Logs log via syslog in CEF format: + +> 1. Log in to the Guardian console. + +> 2. Navigate to Administration->Data Integration, press +Add and select the Common Event Format (CEF) from the drop down + +> 3. Create New Endpoint using the appropriate host information and enable Alerts, Audit Logs, Health Logs for sending. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. 
Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/nucleuscyberncprotect.md b/Tools/Solutions Analyzer/connector-docs/connectors/nucleuscyberncprotect.md index 028cc4f13b9..6d7432106c8 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/nucleuscyberncprotect.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/nucleuscyberncprotect.md @@ -10,4 +10,28 @@ [NC Protect Data Connector (archtis.com)](https://info.archtis.com/get-started-with-nc-protect-sentinel-data-connector) provides the capability to ingest user activity logs and events into Microsoft Sentinel. The connector provides visibility into NC Protect user activity logs and events in Microsoft Sentinel to improve monitoring and investigation capabilities +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **NC Protect**: You must have a running instance of NC Protect for O365. 
Please [contact us](https://www.archtis.com/data-discovery-classification-protection-software-secure-collaboration/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +1. Install NC Protect into your Azure Tenancy +2. Log into the NC Protect Administration site +3. From the left hand navigation menu, select General -> User Activity Monitoring +4. Tick the checkbox to Enable SIEM and click the Configure button +5. Select Microsoft Sentinel as the Application and complete the configuration using the information below +6. Click Save to activate the connection +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/nxlogaixaudit.md b/Tools/Solutions Analyzer/connector-docs/connectors/nxlogaixaudit.md index 59742d13a7b..5b9d4abb82a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/nxlogaixaudit.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/nxlogaixaudit.md @@ -10,4 +10,22 @@ The [NXLog AIX Audit](https://docs.nxlog.co/refman/current/im/aixaudit.html) data connector uses the AIX Audit subsystem to read events directly from the kernel for capturing audit events on the AIX platform. This REST API connector can efficiently export AIX Audit events to Microsoft Sentinel in real time. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**NXLog_parsed_AIX_Audit_view**](https://aka.ms/sentinel-nxlogaixaudit-parser) which is deployed with the Microsoft Sentinel Solution. + +Follow the step-by-step instructions in the *NXLog User Guide* Integration Guide [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/nxlogbsmmacos.md b/Tools/Solutions Analyzer/connector-docs/connectors/nxlogbsmmacos.md index daab2bd9f72..f061027a512 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/nxlogbsmmacos.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/nxlogbsmmacos.md @@ -10,4 +10,20 @@ The [NXLog BSM](https://docs.nxlog.co/refman/current/im/bsm.html) macOS data connector uses Sun's Basic Security Module (BSM) Auditing API to read events directly from the kernel for capturing audit events on the macOS platform. This REST API connector can efficiently export macOS audit events to Microsoft Sentinel in real-time. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Follow the step-by-step instructions in the *NXLog User Guide* Integration Topic [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/nxlogdnslogs.md b/Tools/Solutions Analyzer/connector-docs/connectors/nxlogdnslogs.md index ef2f094e690..83a8725f3cb 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/nxlogdnslogs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/nxlogdnslogs.md @@ -10,4 +10,22 @@ The NXLog DNS Logs data connector uses Event Tracing for Windows ([ETW](https://docs.microsoft.com/windows/apps/trace-processing/overview)) for collecting both Audit and Analytical DNS Server events. The [NXLog *im_etw* module](https://docs.nxlog.co/refman/current/im/etw.html) reads event tracing data directly for maximum efficiency, without the need to capture the event trace into an .etl file. 
This REST API connector can forward DNS Server events to Microsoft Sentinel in real time. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on parsers based on Kusto functions deployed with the Microsoft Sentinel Solution to work as expected. The [**ASimDnsMicrosoftNXLog**](https://aka.ms/sentinel-nxlogdnslogs-parser) is designed to leverage Microsoft Sentinel's built-in DNS-related analytics capabilities. + +Follow the step-by-step instructions in the *NXLog User Guide* Integration Topic [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/nxlogfim.md b/Tools/Solutions Analyzer/connector-docs/connectors/nxlogfim.md index 17d3000539c..dbca8c78a61 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/nxlogfim.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/nxlogfim.md @@ -10,4 +10,20 @@ The [NXLog FIM](https://docs.nxlog.co/refman/current/im/fim.html) module allows for the scanning of files and directories, reporting detected additions, changes, renames and deletions on the designated paths through calculated checksums during successive scans. This REST API connector can efficiently export the configured FIM events to Microsoft Sentinel in real time. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Follow the step-by-step instructions in the [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) integration chapter of the *NXLog User Guide* to configure this connector. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/nxloglinuxaudit.md b/Tools/Solutions Analyzer/connector-docs/connectors/nxloglinuxaudit.md index f4ab0255ee8..430290d93ba 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/nxloglinuxaudit.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/nxloglinuxaudit.md @@ -10,4 +10,20 @@ The [NXLog LinuxAudit](https://docs.nxlog.co/refman/current/im/linuxaudit.html) data connector supports custom audit rules and collects logs without auditd or any other user-space software. IP addresses and group/user IDs are resolved to their respective names making [Linux audit](https://docs.nxlog.co/userguide/integrate/linux-audit.html) logs more intelligible to security analysts. This REST API connector can efficiently export Linux security events to Microsoft Sentinel in real-time. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +Follow the step-by-step instructions in the *NXLog User Guide* Integration Topic [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/obsidiandatasharing.md b/Tools/Solutions Analyzer/connector-docs/connectors/obsidiandatasharing.md index de111607ecd..13ee5c15107 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/obsidiandatasharing.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/obsidiandatasharing.md @@ -10,4 +10,41 @@ The Obsidian Datasharing connector provides the capability to read raw event data from Obsidian Datasharing in Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher. +- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Create ARM Resources and Provide the Required Permissions** + +This connector reads data from the tables that Obsidian Datasharing uses in a Microsoft Analytics Workspace, if the data forwarding option is enabled in Obsidian Datasharing then raw event data is sent to the Microsoft Sentinel Ingestion API. +#### Automated Configuration and Secure Data Ingestion with Entra Application +Clicking on "Deploy" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). +It will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token. +Deploy Obsidian Datasharing connector resources + +**2. Push your logs into the workspace** + +Use the following parameters to configure your machine to send the logs to the workspace. +- **Tenant ID (Directory ID)**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Endpoint Uri**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Rule Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Activity Stream Name**: `Custom-ObsidianActivity_CL` +- **Threat Stream Name**: `Custom-ObsidianThreat_CL` + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions 
Analyzer/connector-docs/connectors/oci-connector-ccp-definition.md b/Tools/Solutions Analyzer/connector-docs/connectors/oci-connector-ccp-definition.md index 92c38be2e46..4ae9f291ada 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/oci-connector-ccp-definition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/oci-connector-ccp-definition.md @@ -10,4 +10,53 @@ The Oracle Cloud Infrastructure (OCI) data connector provides the capability to ingest OCI Logs from [OCI Stream](https://docs.oracle.com/iaas/Content/Streaming/Concepts/streamingoverview.htm) into Microsoft Sentinel using the [OCI Streaming REST API](https://docs.oracle.com/iaas/api/#/streaming/streaming/20180418). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **OCI Streaming API access**: Access to the OCI Streaming API through API Signing Keys is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect to OCI Streaming API to start collecting Event logs in Microsoft Sentinel** + +1) Log in to the OCI console and access the navigation menu. +2) In the navigation menu, go to "Analytics & AI" → "Streaming". +3) Click "Create Stream". +4) Select an existing "Stream Pool" or create a new one. +5) Enter the following details: + - "Stream Name" + - "Retention" + - "Number of Partitions" + - "Total Write Rate" + - "Total Read Rate" (based on your data volume) +6) In the navigation menu, go to "Logging" → "Service Connectors". +7) Click "Create Service Connector". +8) Enter the following details: + - "Connector Name" + - "Description" + - "Resource Compartment" +9) Select the "Source": "Logging". +10) Select the "Target": "Streaming". 
+11) (Optional) Configure "Log Group", "Filters", or use a "custom search query" to stream only the required logs. +12) Configure the "Target" by selecting the previously created stream. +13) Click "Create". +14) Follow the documentation to create a [Private Key and API Key Configuration File](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/apisigningkey.htm). + Note : The connector only supports ingesting data from one partition ID at a time, and that ID must be a single-digit number (e.g., 0, 1, or 2). +- **Stream OCID**: Provide the OCI Stream OCID (E.g. ocid1.stream.oc1..xxxxxxEXAMPLExxxxxx) +- **Service Endpoint Base URL**: Provide the Service Endpoint Base URL: (https://cell-1.streaming.ap-hyderabad-1.oci.oraclecloud.com) +- **Cursor Type** (select) + - Individual Cursor +- **Partition Id**: Provide the Partition Id. (E.g. 0 or 1 or 2) +- **Tenant ID**: OCI Tenant ID (E.g. ocid1.tenancy.oc1..xxxxxxEXAMPLExxxxxx) +- **User ID**: Provide the User Id. (E.g. ocid1.user.oc1..xxxxxxEXAMPLExxxxxx) +- **Pem File Content**: (password field) +- **Pass Phrase**: (password field) +- **Fingerprint**: (password field) +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/office365.md b/Tools/Solutions Analyzer/connector-docs/connectors/office365.md index db11a489670..6949f1e8ef1 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/office365.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/office365.md @@ -10,4 +10,37 @@ The Microsoft 365 (formerly, Office 365) activity log connector provides insight into ongoing user activities. You will get details of operations such as file downloads, access requests sent, changes to group events, set-mailbox and details of the user who performed the actions. 
By connecting Microsoft 365 logs into Microsoft Sentinel you can use this data to view dashboards, create custom alerts, and improve your investigation process. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219943&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Tenant Permissions:** +Requires GlobalAdmin, SecurityAdmin on the workspace's tenant + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Office 365 activity logs to your Microsoft Sentinel.** + +Select the record types you want to collect from your tenant and click **Apply Changes**. +**Select Microsoft 365 Data Types** + +In the Microsoft Sentinel portal, select which data types to enable: + +- ☐ **Exchange** +- ☐ **SharePoint** +- ☐ **Teams** + +Each data type may have specific licensing requirements. Review the information provided for each type in the portal before enabling. + +> 💡 **Portal-Only Feature**: Data type selection is only available in the Microsoft Sentinel portal. + + +**2. Previously connected tenants** + +Microsoft Sentinel now enables Office 365 single-tenant connection. You can modify your previously connected tenants and click **Save**. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `Office365`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/office365project.md b/Tools/Solutions Analyzer/connector-docs/connectors/office365project.md index a79f3b980ff..aab5853cad8 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/office365project.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/office365project.md @@ -10,4 +10,24 @@ Microsoft Project (MSP) is a project management software solution. Depending on your plan, Microsoft Project lets you plan projects, assign tasks, manage resources, create reports and more. This connector allows you to stream your Azure Project audit logs into Microsoft Sentinel in order to track your project activities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **License**: "Microsoft Project eligible license is required." + +**Tenant Permissions:** +Requires GlobalAdmin, SecurityAdmin on the workspace's tenant + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Microsoft Project audit logs to Microsoft Sentinel** + +This connector uses the Office Management API to get your Project audit logs. The logs will be stored and processed in your existing Microsoft Sentinel workspace. You can find the data in the **ProjectActivity** table. 
+- Connect Microsoft Project + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/officeatp.md b/Tools/Solutions Analyzer/connector-docs/connectors/officeatp.md index 6eb61f74ad7..237fa1ec2a2 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/officeatp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/officeatp.md @@ -36,4 +36,24 @@ These alerts can be seen by Office customers in the ** Office Security and Compl For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219942&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Licenses:** +- Office Advanced Threat Protection + +**Tenant Permissions:** +Requires GlobalAdmin, SecurityAdmin on the workspace's tenant + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Microsoft Defender for Office 365 alerts to Microsoft Sentinel** + +Connecting Microsoft Defender for Office 365 will cause your data that is collected by Microsoft Defender for Office 365 service to be stored and processed in the location that you have configured your Microsoft Sentinel workspace. 
+- Connect Microsoft Defender for Office 365 + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/officeirm.md b/Tools/Solutions Analyzer/connector-docs/connectors/officeirm.md index eae214fc819..becf45d8c2f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/officeirm.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/officeirm.md @@ -30,4 +30,26 @@ This solution produces alerts that can be seen by Office customers in the Inside These alerts can be imported into Microsoft Sentinel with this connector, allowing you to see, investigate, and respond to them in a broader organizational threat context. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223721&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** + +**Licenses:** +- Office Information Rights Management + +**Tenant Permissions:** +Requires GlobalAdmin, SecurityAdmin on the workspace's tenant + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Microsoft 365 Insider Risk Management alerts to Microsoft Sentinel** + +Connecting Microsoft 365 Insider Risk Management will cause your data that is collected by Microsoft 365 Insider Risk Management service to be stored and processed in the location that you have configured your Microsoft Sentinel workspace. 
+- Connect Microsoft 365 Insider Risk Management + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/officepowerbi.md b/Tools/Solutions Analyzer/connector-docs/connectors/officepowerbi.md index 303fd28b89d..3b8cb5db13c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/officepowerbi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/officepowerbi.md @@ -10,4 +10,24 @@ Microsoft PowerBI is a collection of software services, apps, and connectors that work together to turn your unrelated sources of data into coherent, visually immersive, and interactive insights. Your data may be an Excel spreadsheet, a collection of cloud-based and on-premises hybrid data warehouses, or a data store of some other type. This connector lets you stream PowerBI audit logs into Microsoft Sentinel, allowing you to track user activities in your PowerBI environment. You can filter the audit data by date range, user, dashboard, report, dataset, and activity type. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **License**: Microsoft Power BI eligible license is required. + +**Tenant Permissions:** +Requires GlobalAdmin, SecurityAdmin on the workspace's tenant + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Microsoft PowerBI audit logs to Microsoft Sentinel** + +This connector uses the Office Management API to get your PowerBI audit logs. The logs will be stored and processed in your existing Microsoft Sentinel workspace. You can find the data in the **PowerBIActivity** table. 
+- Connect Microsoft PowerBI + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/oktasso-polling.md b/Tools/Solutions Analyzer/connector-docs/connectors/oktasso-polling.md index 2db95419b8c..f34ffec3198 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/oktasso-polling.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/oktasso-polling.md @@ -10,4 +10,18 @@ The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) connector provides the capability to ingest audit and event logs from the Okta API into Microsoft Sentinel. The connector provides visibility into these log types in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect OktaSSO** + +Please insert your APIKey +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/oktasso.md b/Tools/Solutions Analyzer/connector-docs/connectors/oktasso.md index 83bb66a1c35..270ad104481 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/oktasso.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/oktasso.md @@ -10,4 +10,77 @@ The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) connector provides the capability to ingest audit and event logs from the Okta API into Microsoft Sentinel. The connector provides visibility into these log types in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Okta API Token**: An Okta API Token is required. See the documentation to learn more about the [Okta System Log API](https://developer.okta.com/docs/reference/api/system-log/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to Okta SSO to pull its logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**NOTE:** This connector has been updated, if you have previously deployed an earlier version, and want to update, please delete the existing Okta Azure Function before redeploying this version. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Okta SSO API** + + [Follow these instructions](https://developer.okta.com/docs/guides/create-an-api-token/create-the-token/) to create an API Token. + +**Note** - For more information on the rate limit restrictions enforced by Okta, please refer to the **[documentation](https://developer.okta.com/docs/reference/rl-global-mgmt/)**. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Okta SSO connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Okta SSO API Authorization Token, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + This method provides an automated deployment of the Okta SSO connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. 
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentineloktaazuredeployv2-solution) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentineloktaazuredeployv2-solution-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **API Token** and **URI**. + - Use the following schema for the `uri` value: `https:///api/v1/logs?since=` Replace `` with your domain. [Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the inital start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format. + - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Okta SSO connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the [Azure Function App](https://aka.ms/sentineloktaazurefunctioncodev2) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. 
After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following five (5) application settings individually, with their respective string values (case-sensitive): + apiToken + workspaceID + workspaceKey + uri + logAnalyticsUri (optional) + - Use the following schema for the `uri` value: `https:///api/v1/logs?since=` Replace `` with your domain. [Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the inital start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format. + - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. +5. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/oktassov2.md b/Tools/Solutions Analyzer/connector-docs/connectors/oktassov2.md index 23e42824355..5d005e1588d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/oktassov2.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/oktassov2.md @@ -10,4 +10,42 @@ The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) data connector provides the capability to ingest audit and event logs from the Okta System Log API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform and uses the Okta System Log API to fetch the events. The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parse the received security event data into custom columns so that queries don't need to parse it again, thus resulting in better performance. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Okta API Token**: An Okta API token. Follow the [following instructions](https://developer.okta.com/docs/guides/create-an-api-token/main/) to create an API token. See the [documentation](https://developer.okta.com/docs/reference/api/system-log/) to learn more about Okta System Log API. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +To enable the Okta Single Sign-On for Microsoft Sentinel, provide the required information below and click on Connect. +> +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. 
+ +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Endpoint** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add domain** + +*Add domain* + +When you click the "Add domain" button in the portal, a configuration form will open. You'll need to provide: + +- **Okta Domain Name** (optional): Okta Domain Name (e.g., myDomain.okta.com) +- **API Key** (optional): API Key + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/onapsis.md b/Tools/Solutions Analyzer/connector-docs/connectors/onapsis.md index 8469c15a757..71185c71a7b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/onapsis.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/onapsis.md @@ -10,4 +10,41 @@ Onapsis Defend Integration is aimed at forwarding alerts and logs collected and detected by Onapsis Platform into Microsoft Sentinel SIEM +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher. +- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rules. 
Typically requires Azure RBAC Owner or User Access Administrator role. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Create ARM Resources and Provide the Required Permissions** + +We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it. +#### Automated deployment of Azure resources +Clicking on "Deploy push connector resources" will trigger the creation of DCR and DCE resources. +It will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials. +- Deploy push connector resources + Application: Onapsis Defend Integration push to Microsoft Sentinel + +**2. Maintain the data collection endpoint details and authentication info in Onapsis Defend Integration** + +Share the data collection endpoint URL and authentication info with the Onapsis Defend Integration administrator to configure the Onapsis Defend Integration to send data to the data collection endpoint. 
+- **Use this value to configure as Tenant ID in the LogIngestionAPI credential.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra Application Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Use this value to configure the LogsIngestionURL parameter when deploying the IFlow.**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **DCR Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/onapsisplatform.md b/Tools/Solutions Analyzer/connector-docs/connectors/onapsisplatform.md index 9bb6de16e54..b3dad087045 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/onapsisplatform.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/onapsisplatform.md @@ -10,4 +10,65 @@ The Onapsis Connector allows you to export the alarms triggered in the Onapsis Platform into Microsoft Sentinel in real-time. This gives you the ability to monitor the activity on your SAP systems, identify incidents and respond to them quickly. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your Onapsis Console and Microsoft Sentinel. This machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Refer to the Onapsis in-product help to set up log forwarding to the Syslog agent. + +> 1. Go to Setup > Third-party integrations > Defend Alarms and follow the instructions for Microsoft Sentinel. + +> 2. Make sure your Onapsis Console can reach the proxy machine where the agent is installed - logs should be sent to port 514 using TCP. + +**3. 
Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Create Onapsis lookup function for incident enrichment** + +[Follow these steps to get this Kusto function](https://aka.ms/sentinel-Onapsis-parser) + +**5. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/oneidentity.md b/Tools/Solutions Analyzer/connector-docs/connectors/oneidentity.md index 221733652ff..aebdfc93f3a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/oneidentity.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/oneidentity.md @@ -10,4 +10,59 @@ The One Identity Safeguard (CEF) Sentinel data connector enhances the standard Common Event Format (CEF) connector with Safeguard for Privileged Sessions-specific dashboards. Use this connector to easily start utilizing the events generated by your device for visualization, alerts, investigations and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Keys** (Workspace): read permissions to shared keys for the workspace. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward One Identity SafeGuard logs to Syslog agent** + +Follow the [instructions in the Safeguard for Privileged Sessions Administration Guide](https://aka.ms/sentinel-cef-oneidentity-forwarding) in section "Universal SIEM Forwarder". Make sure to select the format "CEF". 
+  +Note that by default there is no TLS security set up in the syslog on the Linux machine. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/onelogin.md b/Tools/Solutions Analyzer/connector-docs/connectors/onelogin.md index 537351bf4b5..22a8439335c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/onelogin.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/onelogin.md @@ -14,4 +14,78 @@ The [OneLogin](https://www.onelogin.com/) data connector provides the capability

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Webhooks Credentials/permissions**: **OneLoginBearerToken**, **Callback URL** are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469).You need to generate **OneLoginBearerToken** according to your security requirements and use it in **Custom Headers** section in format: Authorization: Bearer **OneLoginBearerToken**. Logs Format: JSON Array. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OneLogin**](https://aka.ms/sentinel-OneLogin-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuration steps for the OneLogin** + + Follow the [instructions](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469) to configure Webhooks. + +1. Generate the **OneLoginBearerToken** according to your password policy. +2. Set Custom Header in the format: Authorization: Bearer . +3. Use JSON Array Logs Format. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the OneLogin data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the OneLogin data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OneLogin-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **OneLoginBearerToken** and deploy. +4. 
Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. +6. After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the OneLogin data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-OneLogin-functionapp) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + OneLoginBearerToken + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +5. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/oneloginiamlogsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/oneloginiamlogsccpdefinition.md index 1c344df0d4b..9cfc89a2c40 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/oneloginiamlogsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/oneloginiamlogsccpdefinition.md @@ -10,4 +10,47 @@ The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through REST API by using OneLogin [Events API](https://developers.onelogin.com/api-docs/1/events/get-events) and OneLogin [Users API](https://developers.onelogin.com/api-docs/1/users/get-users). The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **OneLogin IAM API Credentials**: To create API Credentials follow the document link provided here, [Click Here](https://developers.onelogin.com/api-docs/1/getting-started/working-with-api-credentials). + Make sure to have an account type of either account owner or administrator to create the API credentials. + Once you create the API Credentials you get your Client ID and Client Secret. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Connect OneLogin IAM Platform to Microsoft Sentinel** + +To ingest data from OneLogin IAM to Microsoft Sentinel, you have to click on Add Domain button below then you get a pop up to fill the details, provide the required information and click on Connect. You can see the domain endpoints connected in the grid. +> +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Endpoint** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add domain** + +*Add domain* + +When you click the "Add domain" button in the portal, a configuration form will open. You'll need to provide: + +- **OneLogin Domain** (optional): Enter your Company's OneLogin Domain +- **Client ID** (optional): Enter your Client ID +- **Client Secret** (optional): Enter your Client Secret + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/onetrustpush.md b/Tools/Solutions Analyzer/connector-docs/connectors/onetrustpush.md index 06fdc820a78..7277d6c7622 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/onetrustpush.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/onetrustpush.md @@ -10,4 +10,40 @@ The OneTrust connector for Microsoft Sentinel provides the capability to have near real time visibility into where sensitive data has been located or remediated across across Google Cloud and other OneTrust supported data sources. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher. +- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Create ARM Resources and Provide the Required Permissions** + +This connector reads data from the tables that OneTrust uses in a Microsoft Analytics Workspace. If OneTrust's data forwarding option is enabled then raw event data can be sent to the Microsoft Sentinel Ingestion API. +#### Automated Configuration and Secure Data Ingestion with Entra Application +Clicking on "Deploy" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). +It will then create an Entra application, link the DCR to it, and set the entered secret in the application. 
This setup enables data to be sent securely to the DCR using an Entra token. +Deploy OneTrust connector resources + +**2. Push your logs into the workspace** + +Use the following parameters to configure your machine to send the logs to the workspace. +- **Tenant ID (Directory ID)**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Endpoint Uri**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Rule Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **OneTrust Metadata Stream Name**: `Custom-OneTrustMetadataV3` + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/openvpn.md b/Tools/Solutions Analyzer/connector-docs/connectors/openvpn.md index 443f36f949d..df3453f00e4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/openvpn.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/openvpn.md @@ -10,4 +10,57 @@ The [OpenVPN](https://github.com/OpenVPN) data connector provides the capability to ingest OpenVPN Server logs into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required.
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OpenVpnEvent**](https://aka.ms/sentinel-openvpn-parser) which is deployed with the Microsoft Sentinel Solution. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the server where the OpenVPN logs are forwarded. + +> Logs from OpenVPN Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. +- **Open Syslog settings** + +**3.
Check your OpenVPN logs.** + +OpenVPN server logs are written into common syslog file (depending on the Linux distribution used: e.g. /var/log/messages) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/oraclecloudinfrastructurelogsconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/oraclecloudinfrastructurelogsconnector.md index 5fb3950c09b..c0f3b6e1dce 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/oraclecloudinfrastructurelogsconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/oraclecloudinfrastructurelogsconnector.md @@ -14,4 +14,98 @@ The Oracle Cloud Infrastructure (OCI) data connector provides the capability to

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **OCI API Credentials**: **API Key Configuration File** and **Private Key** are required for OCI API connection. See the documentation to learn more about [creating keys for API access](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/apisigningkey.htm) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector can go over the 500 column limit of log Analytics. When this happens some logs will be dropped. For this reason the connector can be unreliable depending on the logs that are being generated and collected. + +>**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault.
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OCILogs**](https://aka.ms/sentinel-OracleCloudInfrastructureLogsConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Creating Stream** + +1. Log in to OCI console and go to *navigation menu* -> *Analytics & AI* -> *Streaming* +2. Click *Create Stream* +3. Select Stream Pool or create a new one +4. Provide the *Stream Name*, *Retention*, *Number of Partitions*, *Total Write Rate*, *Total Read Rate* based on your data amount. +5. Go to *navigation menu* -> *Logging* -> *Service Connectors* +6. Click *Create Service Connector* +7. Provide *Connector Name*, *Description*, *Resource Compartment* +8. Select Source: Logging +9. Select Target: Streaming +10. (Optional) Configure *Log Group*, *Filters* or use custom search query to stream only logs that you need. +11. Configure Target - select the stream created before. +12. Click *Create* + +Check the documentation to get more information about [Streaming](https://docs.oracle.com/en-us/iaas/Content/Streaming/home.htm) and [Service Connectors](https://docs.oracle.com/en-us/iaas/Content/service-connector-hub/home.htm). + +**STEP 2 - Creating credentials for OCI REST API** + +Follow the documentation to [create Private Key and API Key Configuration File.](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/apisigningkey.htm) + +>**IMPORTANT:** Save Private Key and API Key Configuration File created during this step as they will be used during deployment step.
+ +**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the OCI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as OCI API credentials, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the OCI data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OracleCloudInfrastructureLogsConnector-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**, **User**, **Key_content**, **Pass_phrase**, **Fingerprint**, **Tenancy**, **Region**, **Message Endpoint**, **Stream Ocid** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the OCI data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the [Azure Function App](https://aka.ms/sentinel-OracleCloudInfrastructureLogsConnector-functionapp) file. Extract archive to your local development computer.. +2. 
Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + AzureSentinelWorkspaceId + AzureSentinelSharedKey + user + key_content + pass_phrase (Optional) + fingerprint + tenancy + region + Message Endpoint + StreamOcid + logAnalyticsUri (Optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/oracledatabaseaudit.md b/Tools/Solutions Analyzer/connector-docs/connectors/oracledatabaseaudit.md index bcbdf7e761e..ac95a54d7eb 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/oracledatabaseaudit.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/oracledatabaseaudit.md @@ -10,4 +10,55 @@ The Oracle DB Audit data connector provides the capability to ingest [Oracle Database](https://www.oracle.com/database/technologies/) audit events into Microsoft Sentinel through the syslog. 
Refer to [documentation](https://docs.oracle.com/en/database/oracle/oracle-database/21/dbseg/introduction-to-auditing.html#GUID-94381464-53A3-421B-8F13-BD171C867405) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Oracle Database Audit and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleDatabaseAudit/Parsers/OracleDatabaseAuditEvent.txt). The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. 
Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. +- **Open Syslog settings** + +**3. Configure Oracle Database Audit events to be sent to Syslog** + +Follow the below instructions + + 1. Create the Oracle database [Follow these steps.](https://learn.microsoft.com/en-us/azure/virtual-machines/workloads/oracle/oracle-database-quick-create) + + 2. Login to Oracle database created from the above step [Follow these steps.](https://docs.oracle.com/cd/F49540_01/DOC/server.815/a67772/create.htm) + + 3. Enable unified logging over syslog by **Alter the system to enable unified logging** [Following these steps.](https://docs.oracle.com/en/database/oracle/oracle-database/21/refrn/UNIFIED_AUDIT_COMMON_SYSTEMLOG.html#GUID-9F26BC8E-1397-4B0E-8A08-3B12E4F9ED3A) + + 4. Create and **enable an Audit policy for unified auditing** [Follow these steps.](https://docs.oracle.com/en/database/oracle/oracle-database/19/sqlrf/CREATE-AUDIT-POLICY-Unified-Auditing.html#GUID-8D6961FB-2E50-46F5-81F7-9AEA314FC693) + + 5. **Enabling syslog and Event Viewer** Captures for the Unified Audit Trail [Follow these steps.](https://docs.oracle.com/en/database/oracle/oracle-database/18/dbseg/administering-the-audit-trail.html#GUID-3EFB75DB-AE1C-44E6-B46E-30E5702B0FC4) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/oracleweblogicserver.md b/Tools/Solutions Analyzer/connector-docs/connectors/oracleweblogicserver.md index 68e0f63b2e0..5bd1d2f1423 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/oracleweblogicserver.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/oracleweblogicserver.md @@ -10,4 +10,60 @@ OracleWebLogicServer data connector provides the capability to ingest [OracleWebLogicServer](https://docs.oracle.com/en/middleware/standalone/weblogic-server/index.html) events into Microsoft Sentinel. 
Refer to [OracleWebLogicServer documentation](https://docs.oracle.com/en/middleware/standalone/weblogic-server/14.1.1.0/index.html) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias OracleWebLogicServerEvent and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleWebLogicServer/Parsers/OracleWebLogicServerEvent.yaml). The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Oracle WebLogic Server where the logs are generated. + +> Logs from Oracle WebLogic Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. 
+ - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. From the left pane, select **Data**, select **Custom Logs** and click **Add+** +3. Click **Browse** to upload a sample of a OracleWebLogicServer log file (e.g. server.log). Then, click **Next >** +4. Select **New line** as the record delimiter and click **Next >** +5. Select **Windows** or **Linux** and enter the path to OracleWebLogicServer logs based on your configuration. Example: + - **Linux** Directory: 'DOMAIN_HOME/servers/server_name/logs/*.log' + - **Windows** Directory: 'DOMAIN_NAME\servers\SERVER_NAME\logs\*.log' +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **OracleWebLogicServer_CL** as the custom log Name and click **Done** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/orcasecurityalerts.md b/Tools/Solutions Analyzer/connector-docs/connectors/orcasecurityalerts.md index f0f3ffce56a..838d12d0d16 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/orcasecurityalerts.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/orcasecurityalerts.md @@ -10,4 +10,20 @@ The Orca Security Alerts connector allows you to easily export Alerts logs to Microsoft Sentinel. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Follow [guidance](https://orcasecurity.zendesk.com/hc/en-us/articles/360043941992-Azure-Sentinel-configuration) for integrating Orca Security Alerts logs with Microsoft Sentinel. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ossec.md b/Tools/Solutions Analyzer/connector-docs/connectors/ossec.md index f337d9ace51..77e1dbbc36e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ossec.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ossec.md @@ -10,4 +10,59 @@ OSSEC data connector provides the capability to ingest [OSSEC](https://www.ossec.net/) events into Microsoft Sentinel. Refer to [OSSEC documentation](https://www.ossec.net/docs) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias OSSEC and load the function code or click [here](https://aka.ms/sentinel-OSSECEvent-parser), on the second line of the query, enter the hostname(s) of your OSSEC device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. 
+ - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +[Follow these steps](https://www.ossec.net/docs/docs/manual/output/syslog-output.html) to configure OSSEC sending alerts via syslog. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ossecama.md b/Tools/Solutions Analyzer/connector-docs/connectors/ossecama.md index 59cad9d9a10..ecdcdd83f17 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ossecama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ossecama.md @@ -10,4 +10,59 @@ OSSEC data connector provides the capability to ingest [OSSEC](https://www.ossec.net/) events into Microsoft Sentinel. Refer to [OSSEC documentation](https://www.ossec.net/docs) for more information. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias OSSEC and load the function code or click [here](https://aka.ms/sentinel-OSSECEvent-parser), on the second line of the query, enter the hostname(s) of your OSSEC device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + [Follow these steps](https://www.ossec.net/docs/docs/manual/output/syslog-output.html) to configure OSSEC sending alerts via syslog. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2.
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltocdl.md b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltocdl.md index 50064c8edba..b555bb3295b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltocdl.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltocdl.md @@ -10,4 +10,59 @@ The [Palo Alto Networks CDL](https://www.paloaltonetworks.com/cortex/cortex-data-lake) data connector provides the capability to ingest [CDL logs](https://docs.paloaltonetworks.com/strata-logging-service/log-reference/log-forwarding-schema-overview) into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoCDLEvent**](https://aka.ms/sentinel-paloaltocdl-parser) which is deployed with the Microsoft Sentinel Solution. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. 
+ +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Configure Cortex Data Lake to forward logs to a Syslog Server using CEF** + +[Follow the instructions](https://docs.paloaltonetworks.com/cortex/cortex-data-lake/cortex-data-lake-getting-started/get-started-with-log-forwarding-app/forward-logs-from-logging-service-to-syslog-server.html) to configure logs forwarding from Cortex Data Lake to a Syslog Server. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. 
You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltocdlama.md b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltocdlama.md index 4e669580962..a4ac7ff7b9c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltocdlama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltocdlama.md @@ -10,4 +10,59 @@ The [Palo Alto Networks CDL](https://www.paloaltonetworks.com/cortex/cortex-data-lake) data connector provides the capability to ingest [CDL logs](https://docs.paloaltonetworks.com/strata-logging-service/log-reference/log-forwarding-schema-overview) into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoCDLEvent**](https://aka.ms/sentinel-paloaltocdl-parser) which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Configure Cortex Data Lake to forward logs to a Syslog Server using CEF** + + [Follow the instructions](https://docs.paloaltonetworks.com/cortex/cortex-data-lake/cortex-data-lake-getting-started/get-started-with-log-forwarding-app/forward-logs-from-logging-service-to-syslog-server.html) to configure logs forwarding from Cortex Data Lake to a Syslog Server. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. 
You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoexpanseccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoexpanseccpdefinition.md index 87548e74c70..ddc92f185d5 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoexpanseccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoexpanseccpdefinition.md @@ -10,4 +10,41 @@ The Palo Alto Cortex Xpanse data connector ingests alerts data into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Palo Alto Xpanse to Microsoft Sentinel** + +To ingest data from Palo Alto Cortex Xpanse to Microsoft Sentinel, click on **Add Domain**. Fill in the required details in the pop-up and click Connect. You will see connected domain endpoints in the grid below. To get the Auth ID and API Key, go to **Settings → Configuration → Integrations → API Keys** in the Cortex Xpanse portal and generate new credentials. 
+**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Endpoint** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add domain** + +*Add domain* + +When you click the "Add domain" button in the portal, a configuration form will open. You'll need to provide: + +- **Domain Name** (optional): e.g., example.crtx.us.paloaltonetworks.com +- **API Key** (optional): Enter your Palo Alto Xpanse API Key +- **Xpanse Auth ID** (optional): Enter your Xpanse Auth ID + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltonetworks.md b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltonetworks.md index 154b6e722f3..2df4d5d2718 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltonetworks.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltonetworks.md @@ -10,4 +10,67 @@ The Palo Alto Networks firewall connector allows you to easily connect your Palo Alto Networks logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. 
Forward Palo Alto Networks logs to Syslog agent** + +Configure Palo Alto Networks to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. + +Go to [configure Palo Alto Networks NGFW for sending CEF events.](https://aka.ms/sentinel-paloaltonetworks-readme) + +Go to [Palo Alto CEF Configuration](https://aka.ms/asi-syslog-paloalto-forwarding) and Palo Alto [Configure Syslog Monitoring](https://aka.ms/asi-syslog-paloalto-configure) steps 2, 3, choose your version, and follow the instructions using the following guidelines: + +1. Set the Syslog server format to **BSD**. + +2. The copy/paste operations from the PDF might change the text and insert random characters. To avoid this, copy the text to an editor and remove any characters that might break the log format before pasting it. + +[Learn more >](https://aka.ms/CEFPaloAlto) + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltonetworksama.md b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltonetworksama.md index 721a4126ff5..9c2e5a4f1d6 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltonetworksama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltonetworksama.md @@ -10,4 +10,68 @@ The Palo Alto Networks firewall connector allows you to easily connect your Palo Alto Networks logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Palo Alto Networks logs to Syslog agent** + + Configure Palo Alto Networks to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. + +Go to [configure Palo Alto Networks NGFW for sending CEF events.](https://aka.ms/sentinel-paloaltonetworks-readme) + +Go to [Palo Alto CEF Configuration](https://aka.ms/asi-syslog-paloalto-forwarding) and Palo Alto [Configure Syslog Monitoring](https://aka.ms/asi-syslog-paloalto-configure) steps 2, 3, choose your version, and follow the instructions using the following guidelines: + +1. Set the Syslog server format to **BSD**. + +2. The copy/paste operations from the PDF might change the text and insert random characters. To avoid this, copy the text to an editor and remove any characters that might break the log format before pasting it. + +[Learn more >](https://aka.ms/CEFPaloAlto) + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. 
You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltonetworkscortex.md b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltonetworkscortex.md index 1639f529b34..f1f32c699f7 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltonetworkscortex.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltonetworkscortex.md @@ -10,4 +10,63 @@ The Palo Alto Networks Cortex XDR connector gives you an easy way to connect to your Cortex XDR logs with Microsoft Sentinel. This increases the visibility of your endpoint security. It will give you better ability to monitor your resources by creating custom Workbooks, analytics rules, Incident investigation, and evidence gathering. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Azure Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Azure Sentinel will use as the proxy between your security solution and Azure Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Azure Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Palo Alto Networks (Cortex) logs to Syslog agent** + +> 1. Go to [Cortex Settings and Configurations](https://inspira.xdr.in.paloaltonetworks.com/configuration/external-alerting) and Click to add New Server under External Applications. + +> 2. Then specify the name and Give public IP of your syslog server in Destination. + +> 3. Give Port number as 514 and from Facility field select FAC_SYSLOG from dropdown. + +> 4. Select Protocol as UDP and hit Create. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. 
+ +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoprismacloud.md b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoprismacloud.md index cdd2ffbaa42..b85f4823d87 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoprismacloud.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoprismacloud.md @@ -14,4 +14,76 @@ The Palo Alto Prisma Cloud CSPM data connector provides the capability to ingest

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Palo Alto Prisma Cloud API Credentials**: **Prisma Cloud API Url**, **Prisma Cloud Access Key ID**, **Prisma Cloud Secret Key** are required for Prisma Cloud API connection. See the documentation to learn more about [creating Prisma Cloud Access Key](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/create-access-keys.html) and about [obtaining Prisma Cloud API Url](https://prisma.pan.dev/api/cloud/api-urls) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Palo Alto Prisma Cloud REST API to pull logs into Microsoft sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoPrismaCloud**](https://aka.ms/sentinel-PaloAltoPrismaCloud-parser) which is deployed with the Microsoft sentinel Solution. + +**STEP 1 - Configuration of the Prisma Cloud** + +Follow the documentation to [create Prisma Cloud Access Key](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/create-access-keys.html) and [obtain Prisma Cloud API Url](https://api.docs.prismacloud.io/reference) + + NOTE: Please use SYSTEM ADMIN role for giving access to Prisma Cloud API because only SYSTEM ADMIN role is allowed to View Prisma Cloud Audit Logs. Refer to [Prisma Cloud Administrator Permissions (paloaltonetworks.com)](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/prisma-cloud-admin-permissions) for more details of administrator permissions. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Prisma Cloud data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Prisma Cloud API credentials, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Prisma Cloud data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below.
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-PaloAltoPrismaCloud-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Prisma Cloud API Url**, **Prisma Cloud Access Key ID**, **Prisma Cloud Secret Key**, **Microsoft sentinel Workspace Id**, **Microsoft sentinel Shared Key** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Prisma Cloud data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-PaloAltoPrismaCloud-functionapp) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **+ New application setting**. +4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): + PrismaCloudAPIUrl + PrismaCloudAccessKeyID + PrismaCloudSecretKey + AzureSentinelWorkspaceId + AzureSentinelSharedKey + logAnalyticsUri (Optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoprismacloudcspmccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoprismacloudcspmccpdefinition.md index ac78e5edc81..183650b157e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoprismacloudcspmccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoprismacloudcspmccpdefinition.md @@ -10,4 +10,33 @@ The Palo Alto Prisma Cloud CSPM data connector allows you to connect to your Palo Alto Prisma Cloud CSPM instance and ingesting Alerts (https://pan.dev/prisma-cloud/api/cspm/alerts/) & Audit Logs(https://pan.dev/prisma-cloud/api/cspm/audit-logs/) into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Palo Alto Prisma Cloud CSPM Events to Microsoft Sentinel** + +To get more information on how to obtain the Prisma Cloud Access Key, Secret Key, and Base URL, please refer to the [connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/Readme.md), provide the required information below and click on Connect. +> +- **Prisma Cloud Access Key**: Enter Access Key +- **Prisma Cloud Secret Key**: (password field) +- **Prisma Cloud Base URL**: https://api2.eu.prismacloud.io +- Click 'Connect' to establish connection +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **PaloAltoPrismaCloudCSPM Api Endpoints** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation.
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoprismacloudcwpp.md b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoprismacloudcwpp.md index 61f4de590e4..91461b2ca3e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoprismacloudcwpp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/paloaltoprismacloudcwpp.md @@ -10,4 +10,26 @@ The [Palo Alto Prisma Cloud CWPP](https://prisma.pan.dev/api/cloud/cwpp/audits/#operation/get-audits-incidents) data connector allows you to connect to your Palo Alto Prisma Cloud CWPP instance and ingesting alerts into Microsoft Sentinel. The data connector is built on Microsoft Sentinel's Codeless Connector Platform and uses the Prisma Cloud API to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **PrismaCloudCompute API Key**: A Palo Alto Prisma Cloud CWPP Monitor API username and password is required. [See the documentation to learn more about PrismaCloudCompute SIEM API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/readme.md). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Palo Alto Prisma Cloud CWPP Security Events to Microsoft Sentinel** + +To enable the Palo Alto Prisma Cloud CWPP Security Events for Microsoft Sentinel, provide the required information below and click on Connect. +> +- **Path to console**: europe-west3.cloud.twistlock.com/{sasid} +- **Prisma Access Key (API)**: Prisma Access Key (API) +- **Secret**: (password field) +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/pathlock-tdnr.md b/Tools/Solutions Analyzer/connector-docs/connectors/pathlock-tdnr.md index d396c6c0bbd..e810ee33af9 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/pathlock-tdnr.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/pathlock-tdnr.md @@ -22,4 +22,42 @@ This precision-driven approach helps security teams drastically reduce false pos By combining business-context intelligence with advanced analytics, Pathlock enables enterprises to strengthen detection accuracy, streamline response actions, and maintain continuous control across their SAP environments—without adding complexity or redundant monitoring layers. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. 
Typically requires Entra ID Application Developer role or higher. +- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Create ARM Resources and Provide the Required Permissions** + +We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it. +#### Automated deployment of Azure resources +Clicking on "Deploy push connector resources" will trigger the creation of DCR and DCE resources. +It will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials. +- Deploy push connector resources + Application: Pathlock Inc. Threat Detection and Response for SAP + +**2. Maintain the data collection endpoint details and authentication info in your central instance of Pathlock's Cybersecurity Application Controls: Threat Detection and Response** + +Share the data collection endpoint URL and authentication info with the Pathlock administrator to configure the plug and play forwarding in Threat Detection and Response to send data to the data collection endpoint. +Please do not hesitate to contact Pathlock if support is needed. 
+- **Use this value to configure as Tenant ID in the LogIngestionAPI credential.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra Application Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Use this value to configure the LogsIngestionURL parameter when deploying the IFlow.**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **DCR Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/perimeter81activitylogs.md b/Tools/Solutions Analyzer/connector-docs/connectors/perimeter81activitylogs.md index ce83300f399..298ee8a2d7e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/perimeter81activitylogs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/perimeter81activitylogs.md @@ -10,4 +10,20 @@ The Perimeter 81 Activity Logs connector allows you to easily connect your Perimeter 81 activity logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Please note the values below and follow the instructions here to connect your Perimeter 81 activity logs with Microsoft Sentinel. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/phosphorus-polling.md b/Tools/Solutions Analyzer/connector-docs/connectors/phosphorus-polling.md index 0556755d277..7f66649360d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/phosphorus-polling.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/phosphorus-polling.md @@ -10,4 +10,37 @@ The Phosphorus Device Connector provides the capability to Phosphorus to ingest device data logs into Microsoft Sentinel through the Phosphorus REST API. The Connector provides visibility into the devices enrolled in Phosphorus. This Data Connector pulls devices information along with its corresponding alerts. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **REST API Credentials/permissions**: **Phosphorus API Key** is required. Please make sure that the API Key associated with the User has the Manage Settings permissions enabled. + + Follow these instructions to enable Manage Settings permissions. + 1. Log in to the Phosphorus Application + 2. Go to 'Settings' -> 'Groups' + 3. Select the Group the Integration user is a part of + 4. 
Navigate to 'Product Actions' -> toggle on the 'Manage Settings' permission. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**STEP 1 - Configuration steps for the Phosphorus API** + + Follow these instructions to create a Phosphorus API key. + 1. Log into your Phosphorus instance + 2. Navigate to Settings -> API + 3. If the API key has not already been created, press the **Add button** to create the API key + 4. The API key can now be copied and used during the Phosphorus Device connector configuration + +**2. Connect the Phosphorus Application with Microsoft Sentinel** + +**STEP 2 - Fill in the details below** + +>**IMPORTANT:** Before deploying the Phosphorus Device data connector, have the Phosphorus Instance Domain Name readily available as well as the Phosphorus API Key(s) +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/pingfederate.md b/Tools/Solutions Analyzer/connector-docs/connectors/pingfederate.md index 50aa30bf585..cb52c0d0fec 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/pingfederate.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/pingfederate.md @@ -10,4 +10,59 @@ The [PingFederate](https://www.pingidentity.com/en/software/pingfederate.html) data connector provides the capability to ingest [PingFederate events](https://docs.pingidentity.com/bundle/pingfederate-102/page/lly1564002980532.html) into Microsoft Sentinel. 
Refer to [PingFederate documentation](https://docs.pingidentity.com/bundle/pingfederate-102/page/tle1564002955874.html) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PingFederateEvent**](https://aka.ms/sentinel-PingFederate-parser) which is deployed with the Microsoft Sentinel Solution. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. 
+ - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +[Follow these steps](https://docs.pingidentity.com/bundle/pingfederate-102/page/gsn1564002980953.html) to configure PingFederate sending audit log via syslog in CEF format. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/pingfederateama.md b/Tools/Solutions Analyzer/connector-docs/connectors/pingfederateama.md index 2926077eb4f..ddb81be100d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/pingfederateama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/pingfederateama.md @@ -10,4 +10,59 @@ The [PingFederate](https://www.pingidentity.com/en/software/pingfederate.html) data connector provides the capability to ingest [PingFederate events](https://docs.pingidentity.com/bundle/pingfederate-102/page/lly1564002980532.html) into Microsoft Sentinel. Refer to [PingFederate documentation](https://docs.pingidentity.com/bundle/pingfederate-102/page/tle1564002955874.html) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PingFederateEvent**](https://aka.ms/sentinel-PingFederate-parser) which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. 
Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + [Follow these steps](https://docs.pingidentity.com/bundle/pingfederate-102/page/gsn1564002980953.html) to configure PingFederate sending audit log via syslog in CEF format. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/pingoneauditlogsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/pingoneauditlogsccpdefinition.md index 4993d0b9529..67a3e6ec47c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/pingoneauditlogsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/pingoneauditlogsccpdefinition.md @@ -10,4 +10,45 @@ This connector ingests **audit activity logs** from the PingOne Identity platform into Microsoft Sentinel using a Codeless Connector Framework. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Ping One connector to Microsoft Sentinel** +Before connecting to PingOne, ensure the following prerequisites are completed. Refer to the [document](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingOne/README.md) for detailed setup instructions, including how to obtain client credentials and the environment ID. +#### 1. Client Credentials + You'll need client credentials, including your client id and client secret. +#### 2. Environment Id + To generate token and gather logs from audit activities endpoint +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. 
+ +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Environment ID** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add domain** + +*Add domain* + +When you click the "Add domain" button in the portal, a configuration form will open. You'll need to provide: + +- **Client ID** (optional): Enter ID of the client +- **Client Secret** (optional): Enter your secret key +- **Environment ID** (optional): Enter your environment Id +- **Api domain** (optional): Enter your API domain, e.g. pingone.com, pingone.eu, etc., depending on the region your credentials were created for +
> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/postgresql.md b/Tools/Solutions Analyzer/connector-docs/connectors/postgresql.md index 284e240efa3..8d52817017c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/postgresql.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/postgresql.md @@ -10,4 +10,72 @@ PostgreSQL data connector provides the capability to ingest [PostgreSQL](https://www.postgresql.org/) events into Microsoft Sentinel. Refer to [PostgreSQL documentation](https://www.postgresql.org/docs/current/index.html) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required.
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on PostgreSQL parser based on a Kusto Function to work as expected. This parser is installed along with solution installation. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Tomcat Server where the logs are generated. + +> Logs from PostgreSQL Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure PostgreSQL to write logs to files** + +1. Edit postgresql.conf file to write logs to files: + +>**log_destination** = 'stderr' + +>**logging_collector** = on + +Set the following parameters: **log_directory** and **log_filename**. 
Refer to the [PostgreSQL documentation for more details](https://www.postgresql.org/docs/current/runtime-config-logging.html) + +**3. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. From the left pane, select **Settings**, select **Custom Logs** and click **+Add custom log** +3. Click **Browse** to upload a sample of a PostgreSQL log file. Then, click **Next >** +4. Select **Timestamp** as the record delimiter and click **Next >** +5. Select **Windows** or **Linux** and enter the path to PostgreSQL logs based on your configuration(e.g. for some Linux distros the default path is /var/log/postgresql/) +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **PostgreSQL** as the custom log Name (the '_CL' suffix will be added automatically) and click **Done**. + +**2. Validate connectivity** + +It may take upwards of 20 minutes until your logs start to appear in Microsoft Sentinel. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/prancerlogdata.md b/Tools/Solutions Analyzer/connector-docs/connectors/prancerlogdata.md index 4a12806a0e1..b4b21b12679 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/prancerlogdata.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/prancerlogdata.md @@ -10,4 +10,27 @@ The Prancer Data Connector has provides the capability to ingest Prancer (CSPM)[https://docs.prancer.io/web/CSPM/] and [PAC](https://docs.prancer.io/web/PAC/introduction/) data to process through Microsoft Sentinel. Refer to [Prancer Documentation](https://docs.prancer.io/web) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Include custom pre-requisites if the connectivity requires - else delete customs**: Description for any custom pre-requisite + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Prancer REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +STEP 1: Follow the documentation on the [Prancer Documentation Site](https://docs.prancer.io/web/) in order to set up a scan with an Azure cloud connector. + +STEP 2: Once the scan is created go to the 'Third Party Integrations' menu for the scan and select Sentinel. + +STEP 3: Follow the configuration wizard to select where in Azure the results should be sent to. + +STEP 4: Data should start to get fed into Microsoft Sentinel for processing.
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/premiummicrosoftdefenderforthreatintelligence.md b/Tools/Solutions Analyzer/connector-docs/connectors/premiummicrosoftdefenderforthreatintelligence.md index a21f66da176..89849de3c9d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/premiummicrosoftdefenderforthreatintelligence.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/premiummicrosoftdefenderforthreatintelligence.md @@ -10,4 +10,17 @@ Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc. Note: This is a paid connector. To use and ingest data from it, please purchase the "MDTI API Access" SKU from the Partner Center. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.** + +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `PremiumMicrosoftDefenderForThreatIntelligence`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/prismacloudcomputenativepoller.md b/Tools/Solutions Analyzer/connector-docs/connectors/prismacloudcomputenativepoller.md index 4aade70c0dc..97c2986077e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/prismacloudcomputenativepoller.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/prismacloudcomputenativepoller.md @@ -10,4 +10,26 @@ The [Palo Alto Prisma Cloud CWPP](https://prisma.pan.dev/api/cloud/cwpp/audits/#operation/get-audits-incidents) data connector allows you to connect to your Prisma Cloud CWPP instance and ingesting alerts into Microsoft Sentinel. The data connector is built on Microsoft Sentinel’s Codeless Connector Platform and uses the Prisma Cloud API to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **PrismaCloudCompute API Key**: A Palo Alto Prisma Cloud CWPP Monitor API username and password is required. [See the documentation to learn more about PrismaCloudCompute SIEM API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/readme.md). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Palo Alto Prisma Cloud CWPP Security Events to Microsoft Sentinel** + +To enable the Palo Alto Prisma Cloud CWPP Security Events for Microsoft Sentinel, provide the required information below and click on Connect. +> +- **Path to console**: https://europe-west3.cloud.twistlock.com/{sasid} +- **Prisma Access Key (API)**: Prisma Access Key (API) +- **Secret**: (password field) +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/proofpointccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/proofpointccpdefinition.md index 33c7f62ca8d..c5db81bb851 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/proofpointccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/proofpointccpdefinition.md @@ -10,4 +10,36 @@ Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. + +**Custom Permissions:** +- **Websocket API Credentials/permissions**: **ProofpointClusterID**, and **ProofpointToken** are required. [See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### Configuration steps for the Proofpoint POD Websocket API + #### The PoD Log API does not allow use of the same token for more than one session at the same time, so make sure your token isn't used anywhere. + Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. + You must provide your cluster id and security token. +#### 1. Retrieve the cluster id + 1.1. Log in to the [proofpoint](https://admin.proofpoint.com/) [**Management Console**] with Admin user credentials + + 1.2. In the **Management Console**, the cluster id is displayed in the upper-right corner. +#### 2. Retrieve the API token + 2.1. Log in to the [proofpoint](https://admin.proofpoint.com/) [**Management Console**] with Admin user credentials + + 2.2. In the **Management Console**, click **Settings** -> **API Key Management** + + 2.3. Under **API Key Management** click on the **PoD Logging** tab. + + 2.4. Get or create a new API key. 
+- **Cluster Id**: cluster_id +- **API Key**: API Key +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/proofpointpod.md b/Tools/Solutions Analyzer/connector-docs/connectors/proofpointpod.md index 8323dc6978f..f96809c03b9 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/proofpointpod.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/proofpointpod.md @@ -10,4 +10,93 @@ Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated; consider moving to the CCP data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Websocket API Credentials/permissions**: **ProofpointClusterID**, **ProofpointToken** is required. [See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>This data connector depends on a parser based on a Kusto Function to work as expected. 
[Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD** + +**STEP 1 - Configuration steps for the Proofpoint Websocket API** + +1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. +2. You must provide your cluster id and security token. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. 
Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. 
Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + ProofpointClusterID + ProofpointToken + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +3. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/proofpointtap.md b/Tools/Solutions Analyzer/connector-docs/connectors/proofpointtap.md index 7030bf66eec..54ba6732c07 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/proofpointtap.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/proofpointtap.md @@ -10,4 +10,77 @@ The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated; consider moving to the CCP data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Proofpoint TAP API Key**: A Proofpoint TAP API username and password is required. [See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to Proofpoint TAP to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Proofpoint TAP API** + +1. Log into the Proofpoint TAP console +2. Navigate to **Connect Applications** and select **Service Principal** +3. 
Create a **Service Principal** (API Authorization Key) + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Proofpoint TAP connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint TAP API Authorization Key(s), readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Proofpoint TAP connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelproofpointtapazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelproofpointtapazuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, and validate the **Uri**. +> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. +> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. 
Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + This method provides the step-by-step instructions to deploy the Proofpoint TAP connector manually with Azure Function (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the [Azure Function App](https://aka.ms/sentinelproofpointtapazurefunctionzip) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following six (6) application settings individually, with their respective string values (case-sensitive): + apiUsername + apipassword + workspaceID + workspaceKey + uri + logAnalyticsUri (optional) +> - Set the `uri` value to: `https://tap-api-v2.proofpoint.com/v2/siem/all?format=json&sinceSeconds=300` +> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion. +> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us` +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/proofpointtapv2.md b/Tools/Solutions Analyzer/connector-docs/connectors/proofpointtapv2.md index 92ad527415f..1f48c72b206 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/proofpointtapv2.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/proofpointtapv2.md @@ -10,4 +10,30 @@ The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. + +**Custom Permissions:** +- **Proofpoint TAP API Key**: A Proofpoint TAP API service principal and secret is required to access Proofpoint's SIEM API. [See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**Configuration steps for the Proofpoint TAP API** + +1. Log into the [Proofpoint TAP dashboard](https://threatinsight.proofpoint.com/) +2. Navigate to **Settings** and go to **Connected Applications** tab + 3. Click on **Create New Credential** + 4. Provide a name and click **Generate** + 5. Copy **Service Principal** and **Secret** values + +>**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**ProofpointTAPEvent**](https://aka.ms/sentinel-ProofpointTAPDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +- **Service Principal**: 123456 +- **Secret**: (password field) +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/pulseconnectsecure.md b/Tools/Solutions Analyzer/connector-docs/connectors/pulseconnectsecure.md index 290aa3e52e8..8d25803cdcb 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/pulseconnectsecure.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/pulseconnectsecure.md @@ -10,4 +10,47 @@ The [Pulse Connect Secure](https://www.pulsesecure.net/products/pulse-connect-secure/) connector allows you to easily connect your Pulse Connect Secure logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Pulse Connect Secure with Microsoft Sentinel provides more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. 
+ +**Custom Permissions:** +- **Pulse Connect Secure**: must be configured to export logs via Syslog + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Pulse Connect Secure and load the function code or click [here](https://aka.ms/sentinel-PulseConnectSecure-parser), on the second line of the query, enter the hostname(s) of your Pulse Connect Secure device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. 
Configure and connect the Pulse Connect Secure** + +[Follow the instructions](https://help.ivanti.com/ps/help/en_US/PPS/9.1R13/ag/configuring_an_external_syslog_server.htm) to enable syslog streaming of Pulse Connect Secure logs. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/qscoutappeventsccfdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/qscoutappeventsccfdefinition.md index d3f46b6a845..2ac5fb8928a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/qscoutappeventsccfdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/qscoutappeventsccfdefinition.md @@ -10,4 +10,24 @@ Ingest Qscout application events into Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required + +**Custom Permissions:** +- **Qscout organization id**: The API requires your organization ID in Qscout. +- **Qscout organization API key**: The API requires your organization API key in Qscout. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This connector uses Codeless Connector Framework (CCF) to connect to the Qscout app events feed and ingest data into Microsoft Sentinel + +Provide the required values below: +- **Qscout Organization ID**: 123456 +- **Qscout Organization API Key**: abcdxyz +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/qualyskb.md b/Tools/Solutions Analyzer/connector-docs/connectors/qualyskb.md index 7202e2f3c8f..bed3928e526 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/qualyskb.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/qualyskb.md @@ -14,4 +14,85 @@ The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerabi This data can used to correlate and enrich vulnerability detections found by the [Qualys Vulnerability Management (VM)](https://docs.microsoft.com/azure/sentinel/connect-qualys-vm) data connector. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Qualys API Key**: A Qualys VM API username and password is required. [See the documentation to learn more about Qualys VM API](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias QualysVM Knowledgebase and load the function code or click [here](https://aka.ms/sentinel-crowdstrikefalconendpointprotection-parser), on the second line of the query, enter the hostname(s) of your QualysVM Knowledgebase device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +>This data connector depends on a parser based on a Kusto Function to work as expected. [Follow the steps](https://aka.ms/sentinel-qualyskb-parser) to use the Kusto function alias, **QualysKB** + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Qualys API** + +1. Log into the Qualys Vulnerability Management console with an administrator account, select the **Users** tab and the **Users** subtab. +2. Click on the **New** drop-down menu and select **Users**. +3. Create a username and password for the API account. +4. In the **User Roles** tab, ensure the account role is set to **Manager** and access is allowed to **GUI** and **API** +4. 
Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. +5. Log back into the console using an administrator account and modify the API accounts User Roles, removing access to **GUI**. +6. Save all changes. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Qualys KB connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Qualys API username and password, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Qualys KB connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-qualyskb-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-qualyskb-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , update the **URI**, and any additional URI **Filter Parameters** (This value should include a "&" symbol between each parameter and should not include any spaces) +> - Enter the URI that corresponds to your region. 
The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348) +> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + - Note: If deployment failed due to the storage account name being taken, change the **Function Name** to a unique value and redeploy. + + **Option 2 - Manual Deployment of Azure Functions** + + This method provides the step-by-step instructions to deploy the Qualys KB connector manually with Azure Function. +**Step 1 - Deploy a Function App** + + 1. Download the [Azure Function App](https://aka.ms/sentinel-qualyskb-functioncode) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following seven (7) application settings individually, with their respective string values (case-sensitive): + apiUsername + apiPassword + workspaceID + workspaceKey + uri + filterParameters + logAnalyticsUri (optional) +> - Enter the URI that corresponds to your region. 
The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348). The `uri` value must follow the following schema: `https:///api/2.0` +> - Add any additional filter parameters, for the `filterParameters` variable, that need to be appended to the URI. The `filterParameter` value should include a "&" symbol between each parameter and should not include any spaces. +> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. + - Use logAnalyticsUri to override the log analytics API endpoint for delegated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/qualysvmlogsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/qualysvmlogsccpdefinition.md index da0fc71223a..d62fa4a0520 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/qualysvmlogsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/qualysvmlogsccpdefinition.md @@ -10,4 +10,37 @@ The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) data connector provides the capability to ingest vulnerability host detection data into Microsoft Sentinel through the Qualys API. The connector provides visibility into host detection data from vulerability scans. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+ +**Custom Permissions:** +- **API access and roles**: Ensure the Qualys VM user has a role of Reader or higher. If the role is Reader, ensure that API access is enabled for the account. Auditor role is not supported to access the API. For more details, refer to the Qualys VM [Host Detection API](https://docs.qualys.com/en/vm/qweb-all-api/mergedProjects/qapi-assets/host_lists/host_detection.htm#v_3_0) and [User role Comparison](https://qualysguard.qualys.com/qwebhelp/fo_portal/user_accounts/user_roles_comparison_vm.htm) document. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Qualys Vulnerability Management to Microsoft Sentinel** +>**NOTE:** To gather data for Detections based on Host, expand the **DetectionList** column in the table. +To gather data from Qualys VM, you need to provide the following resources +#### 1. API Credentials + To gather data from Qualys VM, you'll need Qualys API credentials, including your Username and Password. +#### 2. API Server URL + To gather data from Qualys VM, you'll need the Qualys API server URL specific to your region. You can find the exact API server URL for your region [here](https://www.qualys.com/platform-identification/#api-urls) +- **Qualys API User Name**: Enter UserName +- **Qualys API Password**: (password field) +- **Qualys API Server URL**: Enter API Server URL +#### 3. Truncation Limit + Configure the maximum number of host records to retrieve per API call (20-5000 range). Higher values may improve performance but could impact API response times. +- **Truncation Limit** (select) + - 1000 - API default value + - 20 - Minimal load, slower collection + - 100 - Low load + - 500 - Moderate load + - 2500 - High load, faster collection + - ... 
and 1 more options +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/qualysvulnerabilitymanagement.md b/Tools/Solutions Analyzer/connector-docs/connectors/qualysvulnerabilitymanagement.md index 983458015bd..51d86a5de81 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/qualysvulnerabilitymanagement.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/qualysvulnerabilitymanagement.md @@ -14,4 +14,108 @@ The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerabi

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Qualys API Key**: A Qualys VM API username and password is required. [See the documentation to learn more about Qualys VM API](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to Qualys VM to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Qualys VM API** + +1. Log into the Qualys Vulnerability Management console with an administrator account, select the **Users** tab and the **Users** subtab. +2. Click on the **New** drop-down menu and select **Users..** +3. 
Create a username and password for the API account.
+4. In the **User Roles** tab, ensure the account role is set to **Manager** and access is allowed to **GUI** and **API**
+5. Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account.
+6. Log back into the console using an administrator account and modify the API account's User Roles, removing access to **GUI**.
+7. Save all changes.
+
+**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**
+
+>**IMPORTANT:** Before deploying the Qualys VM connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Qualys VM API Authorization Key(s), readily available.
+- **Workspace ID**: `WorkspaceId`
+ > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+- **Primary Key**: `PrimaryKey`
+ > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+
+>**NOTE:** This connector has been updated; if you have previously deployed an earlier version and want to update, please delete the existing Qualys VM Azure Function before redeploying this version. Please use Qualys V2 version Workbook, detections.
+
+**3. Option 1 - Azure Resource Manager (ARM) Template**
+
+Use this method for automated deployment of the Qualys VM connector using an ARM Template.
+
+1. Click the **Deploy to Azure** button below.
+
+ [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2-gov)
+2. Select the preferred **Subscription**, **Resource Group** and **Location**.
+3. 
Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, update the **URI**, and any additional URI **Filter Parameters** (each filter should be separated by an "&" symbol, no spaces.)
+> - Enter the URI that corresponds to your region. The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348) -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.
+ - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.
+> - Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.
+4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.
+5. Click **Purchase** to deploy.
+
+**4. Option 2 - Manual Deployment of Azure Functions**
+
+Use the following step-by-step instructions to deploy the Qualys VM connector manually with Azure Functions.
+
+**1. Create a Function App**
+
+1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.
+2. In the **Basics** tab, ensure Runtime stack is set to **PowerShell Core**.
+3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.
+4. Make other preferable configuration changes, if needed, then click **Create**.
+
+**2. Import Function App Code**
+
+1. 
In the newly created Function App, select **Functions** on the left pane and click **+ New Function**. +2. Select **Timer Trigger**. +3. Enter a unique Function **Name** and leave the default cron schedule of every 5 minutes, then click **Create**. +5. Click on **Code + Test** on the left pane. +6. Copy the [Function App Code](https://aka.ms/sentinel-QualysVM-functioncodeV2) and paste into the Function App `run.ps1` editor. +7. Click **Save**. + +**3. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following eight (8) application settings individually, with their respective string values (case-sensitive): + apiUsername + apiPassword + workspaceID + workspaceKey + uri + filterParameters + timeInterval + logAnalyticsUri (optional) +> - Enter the URI that corresponds to your region. The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348). The `uri` value must follow the following schema: `https:///api/2.0/fo/asset/host/vm/detection/?action=list&vm_processed_after=` -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format. +> - Add any additional filter parameters, for the `filterParameters` variable, that need to be appended to the URI. Each parameter should be seperated by an "&" symbol and should not include any spaces. +> - Set the `timeInterval` (in minutes) to the value of `5` to correspond to the Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion. +> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + +**4. Configure the host.json**. + +Due to the potentially large amount of Qualys host detection data being ingested, it can cause the execution time to surpass the default Function App timeout of five (5) minutes. Increase the default timeout duration to the maximum of ten (10) minutes, under the Consumption Plan, to allow more time for the Function App to execute. + +1. In the Function App, select the Function App Name and select the **App Service Editor** blade. +2. Click **Go** to open the editor, then select the **host.json** file under the **wwwroot** directory. +3. Add the line `"functionTimeout": "00:10:00",` above the `managedDependancy` line +4. Ensure **SAVED** appears on the top right corner of the editor, then exit the editor. 
+ +> NOTE: If a longer timeout duration is required, consider upgrading to an [App Service Plan](https://docs.microsoft.com/azure/azure-functions/functions-scale#timeout) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/radiflowisid.md b/Tools/Solutions Analyzer/connector-docs/connectors/radiflowisid.md index 1d259a59144..c1a5a9fa077 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/radiflowisid.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/radiflowisid.md @@ -10,4 +10,65 @@ iSID enables non-disruptive monitoring of distributed ICS networks for changes in topology and behavior, using multiple security packages, each offering a unique capability pertaining to a specific type of network activity +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**RadiflowEvent**] which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade. + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule). + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy._ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine. + + **Step B. Configure iSID to send logs using CEF** + + Configure log forwarding using CEF: + +1. Navigate to the **System Notifications** section of the Configuration menu. + +2. Under Syslog, select **+Add**. + +3. In the **New Syslog Server** dialog specify the name, remote server **IP**, **Port**, **Transport** and select **Format** - **CEF**. + +4. Press **Apply** to exit the **Add Syslog dialog**. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/redcanarydataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/redcanarydataconnector.md index 8dd5a5f96f3..2b29198e48e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/redcanarydataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/redcanarydataconnector.md @@ -10,4 +10,20 @@ The Red Canary data connector provides the capability to ingest published Detections into Microsoft Sentinel using the Data Collector REST API. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Create an Automate Playbook and Trigger as detailed in [this article](https://help.redcanary.com/hc/en-us/articles/4410957523479-Azure-Sentinel). You can skip the **Add analysis rule to Microsoft Sentinel** section; this data connector allows you to import the analysis rule directly into your workspace. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ridgebotdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/ridgebotdataconnector.md index e2abe148b62..b8b9ea23be9 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ridgebotdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ridgebotdataconnector.md @@ -10,4 +10,61 @@ The RidgeBot connector lets users connect RidgeBot with Microsoft Sentinel, allowing creation of Dashboards, Workbooks, Notebooks and Alerts. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. 
Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade.
+
+2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.
+
+3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)
+
+ _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_
+
+4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine
+
+ **Step B. Forward Common Event Format (CEF) logs to Syslog agent**
+
+ Configure the RidgeBot to forward events to syslog server as described here: https://portal.ridgesecurity.ai/downloadurl/89x72912. Generate some attack events for your application.
+
+ **Step C. Validate connection**
+
+ Follow the instructions to validate your connectivity:
+
+Open Log Analytics to check if the logs are received using the CommonSecurityLog schema.
+
+It may take about 20 minutes until the connection streams data to your workspace.
+
+If the logs are not received, run the following connectivity validation script:
+
+ 1. Make sure that you have Python on your machine using the following command: python --version
+
+2. You must have elevated permissions (sudo) on your machine
+ - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef`
+
+**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/rsaidplus-adminglogs-connector.md b/Tools/Solutions Analyzer/connector-docs/connectors/rsaidplus-adminglogs-connector.md index 4d4cb4fdb3b..6db34695fc8 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/rsaidplus-adminglogs-connector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/rsaidplus-adminglogs-connector.md @@ -10,4 +10,37 @@ The RSA ID Plus AdminLogs Connector provides the capability to ingest [Cloud Admin Console Audit Events](https://community.rsa.com/s/article/Cloud-Administration-Event-Log-API-5d22ba17) into Microsoft Sentinel using Cloud Admin APIs. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **RSA ID Plus API Authentication**: To access the Admin APIs, a valid Base64URL encoded JWT token, signed with the client's Legacy Administration API key is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Codeless Connector Framework (CCF) to connect to the RSA ID Plus Cloud Admin APIs to pull logs into Microsoft Sentinel. + +**1. **STEP 1** - Create Legacy Admin API Client in Cloud Admin Console.** + +Follow steps mentioned in this [page](https://community.rsa.com/s/article/Manage-Legacy-Clients-API-Keys-a89c9cbc#). + +**2. 
**STEP 2** - Generate the Base64URL encoded JWT Token.** + +Follow the steps mentioned in this [page](https://community.rsa.com/s/article/Authentication-for-the-Cloud-Administration-APIs-a04e3fb9) under the header 'Legacy Administration API'. + +**3. **STEP 3** - Configure the Cloud Admin API to start ingesting Admin event logs into Microsoft Sentinel.** + +Provide the required values below: +- **Admin API URL**: https://.access.securid.com/AdminInterface/restapi/v1/adminlog/exportLogs +- **JWT Token**: (password field) + +**4. **STEP 4** - Click Connect** + +Verify all the fields above were filled in correctly. Press Connect to start the connector. +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/rsasecuridam.md b/Tools/Solutions Analyzer/connector-docs/connectors/rsasecuridam.md index 8541973ce72..0b11a10664d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/rsasecuridam.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/rsasecuridam.md @@ -10,4 +10,51 @@ The [RSA® SecurID Authentication Manager](https://www.securid.com/) data connector provides the capability to ingest [RSA® SecurID Authentication Manager events](https://community.rsa.com/t5/rsa-authentication-manager/rsa-authentication-manager-log-messages/ta-p/630160) into Microsoft Sentinel. Refer to [RSA® SecurID Authentication Manager documentation](https://community.rsa.com/t5/rsa-authentication-manager/getting-started-with-rsa-authentication-manager/ta-p/569582) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**RSASecurIDAMEvent**](https://aka.ms/sentinel-rsasecuridam-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using RSA SecurID Authentication Manager version: 8.4 and 8.5 + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server where the RSA® SecurID Authentication Manager logs are forwarded. + +> Logs from RSA® SecurID Authentication Manager Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure RSA® SecurID Authentication Manager event forwarding** + +Follow the configuration steps below to get RSA® SecurID Authentication Manager logs into Microsoft Sentinel. +1. [Follow these instructions](https://community.rsa.com/t5/rsa-authentication-manager/configure-the-remote-syslog-host-for-real-time-log-monitoring/ta-p/571374) to forward alerts from the Manager to a syslog server. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/rubriksecuritycloudazurefunctions.md b/Tools/Solutions Analyzer/connector-docs/connectors/rubriksecuritycloudazurefunctions.md index 66f080cf53d..8e3058058fb 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/rubriksecuritycloudazurefunctions.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/rubriksecuritycloudazurefunctions.md @@ -10,4 +10,121 @@ The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Rubrik webhook which push its logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Rubrik Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Rubrik connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-RubrikWebhookEvents-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Function Name + Workspace ID + Workspace Key + AnomaliesTableName + RansomwareAnalysisTableName + ThreatHuntsTableName + EventsTableName + LogLevel + +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**3. 
Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Rubrik Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-RubrikWebhookEvents-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. RubrikXXXXX). + + e. **Select a runtime:** Choose Python 3.8 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. 
+ +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective values (case-sensitive): + WorkspaceID + WorkspaceKey + AnomaliesTableName + RansomwareAnalysisTableName + ThreatHuntsTableName + EventsTableName + LogLevel + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. +4. Once all application settings have been entered, click **Save**. + +**Post Deployment steps** + +**7. 1) Get the Function app endpoint** + +1. Go to Azure function Overview page and Click on **"Functions"** tab. +2. Click on the function called **"RubrikHttpStarter"**. +3. Go to **"GetFunctionurl"** and copy the function url. + +**8. 2) Add a webhook in RubrikSecurityCloud to send data to Microsoft Sentinel.** + +Follow the Rubrik User Guide instructions to [Add a Webhook](https://docs.rubrik.com/en-us/saas/saas/common/adding_webhook.html) to begin receiving event information + 1. Select the Microsoft Sentinel as the webhook Provider + 2. Enter the desired Webhook name + 3. Enter the URL part from copied Function-url as the webhook URL endpoint and replace **{functionname}** with **"RubrikAnomalyOrchestrator"**, for the Rubrik Microsoft Sentinel Solution + 4. Select the EventType as Anomaly + 5. Select the following severity levels: Critical, Warning, Informational + 6. Choose multiple log types, if desired, when running **"RubrikEventsOrchestrator"** + 7. Repeat the same steps to add webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events. 
+ + + NOTE: While adding webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events, replace **{functionname}** with **"RubrikRansomwareOrchestrator"**, **"RubrikThreatHuntOrchestrator"** and **"RubrikEventsOrchestrator"** respectively in copied function-url. + +*Now we are done with the Rubrik webhook configuration. Once the webhook events are triggered, you should be able to see the Anomaly, Anomaly Detection Analysis, Threat Hunt events and Other Events from Rubrik in the respective Log Analytics workspace tables called "Rubrik_Anomaly_Data_CL", "Rubrik_Ransomware_Data_CL", "Rubrik_ThreatHunt_Data_CL", and "Rubrik_Events_Data_CL".* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/sailpointidentitynow.md b/Tools/Solutions Analyzer/connector-docs/connectors/sailpointidentitynow.md index 8d6bd39423c..eb58640c888 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/sailpointidentitynow.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/sailpointidentitynow.md @@ -10,4 +10,97 @@ The [SailPoint](https://www.sailpoint.com/) IdentityNow data connector provides the capability to ingest [SailPoint IdentityNow] search events into Microsoft Sentinel through the REST API. The connector provides customers the ability to extract audit information from their IdentityNow tenant. It is intended to make it even easier to bring IdentityNow user activity and governance events into Microsoft Sentinel to improve insights from your security incident and event monitoring solution. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **SailPoint IdentityNow API Authentication Credentials**: TENANT_ID, CLIENT_ID and CLIENT_SECRET are required for authentication. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the SailPoint IdentityNow REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the SailPoint IdentityNow API** + + [Follow the instructions](https://community.sailpoint.com/t5/IdentityNow-Articles/Best-Practice-Using-Personal-Access-Tokens-in-IdentityNow/ta-p/150471) to obtain the credentials. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the SailPoint IdentityNow data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the SailPoint IdentityNow data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-sailpointidentitynow-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter other information and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the SailPoint IdentityNow data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-sailpointidentitynow-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. 
+If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. searcheventXXXXX). + + e. **Select a runtime:** Choose Python 3.9. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + TENANT_ID + SHARED_KEY + LIMIT + GRANT_TYPE + CUSTOMER_ID + CLIENT_ID + CLIENT_SECRET + AZURE_STORAGE_ACCESS_KEY + AZURE_STORAGE_ACCOUNT_NAME + AzureWebJobsStorage + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/salesforceservicecloud.md b/Tools/Solutions Analyzer/connector-docs/connectors/salesforceservicecloud.md index 0ccc1e3c42c..7b7d1abeff1 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/salesforceservicecloud.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/salesforceservicecloud.md @@ -14,4 +14,80 @@ The Salesforce Service Cloud data connector provides the capability to ingest in

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Salesforce API Username**, **Salesforce API Password**, **Salesforce Security Token**, **Salesforce Consumer Key**, **Salesforce Consumer Secret** is required for REST API. [See the documentation to learn more about API](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/quickstart.htm). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Salesforce Lightning Platform REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. 
+ +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SalesforceServiceCloud and load the function code or click [here](https://aka.ms/sentinel-SalesforceServiceCloud-parser). The function usually takes 10-15 minutes to activate after solution installation/update. + +**STEP 1 - Configuration steps for the Salesforce Lightning Platform REST API** + +1. See the [link](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/quickstart.htm) and follow the instructions for obtaining Salesforce API Authorization credentials. +2. On the **Set Up Authorization** step choose **Session ID Authorization** method. +3. You must provide your client id, client secret, username, and password with user security token. + +>**NOTE:** Ingesting data from on an hourly interval may require additional licensing based on the edition of the Salesforce Service Cloud being used. Please refer to [Salesforce documentation](https://www.salesforce.com/editions-pricing/service-cloud/) and/or support for more details. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Salesforce Service Cloud data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Salesforce API Authorization credentials, readily available. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Salesforce Service Cloud data connector using an ARM Template. +
1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SalesforceServiceCloud-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **Salesforce API Username**, **Salesforce API Password**, **Salesforce Security Token**, **Salesforce Consumer Key**, **Salesforce Consumer Secret** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Salesforce Service Cloud data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-SalesforceServiceCloud-functionapp) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. 
After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + SalesforceUser + SalesforcePass + SalesforceSecurityToken + SalesforceConsumerKey + SalesforceConsumerSecret + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us` +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/salesforceservicecloudccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/salesforceservicecloudccpdefinition.md index 9616ef606a9..9fe75485cf3 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/salesforceservicecloudccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/salesforceservicecloudccpdefinition.md @@ -10,4 +10,29 @@ The Salesforce Service Cloud data connector provides the capability to ingest information about your Salesforce operational events into Microsoft Sentinel through the REST API. The connector provides ability to review events in your org on an accelerated basis, get [event log files](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/event_log_file_hourly_overview.htm) in hourly increments for recent activity. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Salesforce Service Cloud API access**: Access to the Salesforce Service Cloud API through a Connected App is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect to Salesforce Service Cloud API to start collecting event logs in Microsoft Sentinel** + +Follow [Create a Connected App in Salesforce for OAuth](https://help.salesforce.com/s/articleView?id=platform.ev_relay_create_connected_app.htm&type=5) and [Configure a Connected App for the OAuth 2.0 Client Credentials Flow](https://help.salesforce.com/s/articleView?id=xcloud.connected_app_client_credentials_setup.htm&type=5) to create a Connected App with access to the Salesforce Service Cloud API. Through those instructions, you should get the Consumer Key and Consumer Secret. + For Salesforce Domain name, Go to Setup, type My Domain in the Quick Find box, and select My Domain to view your domain details. Make sure to enter the domain name without a trailing slash (e.g., https://your-domain.my.salesforce.com). Fill the form below with that information. 
+- **Salesforce Domain Name**: Salesforce Domain Name +- **Log Collection Interval** (select) + - Hourly + - Daily +- **OAuth Configuration**: + - Consumer Key + - Consumer Secret + - Click 'Connect' to authenticate + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/samsungdcdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/samsungdcdefinition.md index 68101909710..1ad23cd2787 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/samsungdcdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/samsungdcdefinition.md @@ -10,4 +10,62 @@ Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Entra app**: An Entra app needs to be registered and provisioned with ‘Microsoft Metrics Publisher’ role and configured with either Certificate or Client Secret as credentials for secure data transfer. See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution. + +**2. STEP 1 - Create and register an Entra Application** + +>**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated. + +>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app. + +**3. STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template** + +>**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance. +- **Workspace Name**: `WorkspaceName` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +1. Click the button below to install Samsung Knox Intelligence Solution. + + [![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy) +2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group. + +**5. STEP 3 - Obtain Microsoft Sentinel Data Collection details** + +Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? 
and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). + +>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). + +Ensure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly. + +**6. STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts** + +1. Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal. +> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions. + +2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs. + +3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields. + + >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. + + >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. + +4. Click on **'Test Connection'** and ensure the connection is successful. + +5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).** + +6. To complete the Microsoft Sentinel integration, click **'Save'**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/sapbtpauditevents.md b/Tools/Solutions Analyzer/connector-docs/connectors/sapbtpauditevents.md index fa8b6b2e4cd..b4c74715c11 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/sapbtpauditevents.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/sapbtpauditevents.md @@ -10,4 +10,57 @@ SAP Business Technology Platform (SAP BTP) brings together data management, analytics, artificial intelligence, application development, automation, and integration in one, unified environment. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Client Id and Client Secret for Audit Retrieval API**: Enable API access in BTP. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**Step 1 - Configuration steps for the SAP BTP Audit Retrieval API** + +Follow the steps provided by SAP [see Audit Log Retrieval API for Global Accounts in the Cloud Foundry Environment](https://help.sap.com/docs/btp/sap-business-technology-platform/audit-log-retrieval-api-for-global-accounts-in-cloud-foundry-environment/). Take a note of the **url** (Audit Retrieval API URL), **uaa.url** (User Account and Authentication Server url) and the associated **uaa.clientid**. 
+ +>**NOTE:** You can onboard one or more BTP subaccounts by following the steps provided by SAP [see Audit Log Retrieval API Usage for Subaccounts in the Cloud Foundry Environment](https://help.sap.com/docs/btp/sap-business-technology-platform/audit-log-retrieval-api-usage-for-subaccounts-in-cloud-foundry-environment/). Add a connection for each subaccount. + +**2. Connect events from SAP BTP to Microsoft Sentinel** + +Connect using OAuth client credentials +**BTP connection** + +When you click the "Add account" button in the portal, a configuration form will open. You'll need to provide: + +*Account Details* + +- **Subaccount name (e.g. Contoso). This will be projected to the InstanceName column.** (optional): no space or special character allowed! +- **SAP BTP Client ID** (optional): Client ID +- **SAP BTP Client Secret** (optional): Client Secret +- **Authorization server URL (UAA server)** (optional): https://your-tenant.authentication.region.hana.ondemand.com +- **Audit Retrieval API URL** (optional): https://auditlog-management.cfapps.region.hana.ondemand.com + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + + +**3. Subaccounts** + +Each row represents a connected subaccount +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Subaccount Name** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. 
You cannot configure data collectors through this static documentation. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/sapetdalerts.md b/Tools/Solutions Analyzer/connector-docs/connectors/sapetdalerts.md index e3ac18deaef..af96247b815 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/sapetdalerts.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/sapetdalerts.md @@ -10,4 +10,58 @@ The SAP Enterprise Threat Detection, cloud edition (ETD) data connector enables ingestion of security alerts from ETD into Microsoft Sentinel, supporting cross-correlation, alerting, and threat hunting. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Client Id and Client Secret for ETD Retrieval API**: Enable API access in ETD. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**Step 1 - Configuration steps for the SAP ETD Audit Retrieval API** + +Follow the steps provided by SAP [see ETD docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-for-global-accounts-in-cloud-foundry-environment/). Take a note of the **url** (Audit Retrieval API URL), **uaa.url** (User Account and Authentication Server url) and the associated **uaa.clientid**. 
+ +>**NOTE:** You can onboard one or more ETD subaccounts by following the steps provided by SAP [see ETD docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-usage-for-subaccounts-in-cloud-foundry-environment/). Add a connection for each subaccount. + +>**TIP:** Use the [shared blog series](https://community.sap.com/t5/enterprise-resource-planning-blog-posts-by-sap/sap-enterprise-threat-detection-cloud-edition-joins-forces-with-microsoft/ba-p/13942075) for additional info. + +**2. Connect events from SAP ETD to Microsoft Sentinel** + +Connect using OAuth client credentials +**ETD connection** + +When you click the "Add account" button in the portal, a configuration form will open. You'll need to provide: + +*Account Details* + +- **SAP ETD Client ID** (optional): Client ID +- **SAP ETD Client Secret** (optional): Client Secret +- **Authorization server URL (UAA server)** (optional): https://your-tenant.authentication.region.hana.ondemand.com/oauth/token +- **SAP ETD data retrieval API URL** (optional): https://your-etd-cloud-data-retrieval-service.cfapps.region.hana.ondemand.com + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + + +**3. ETD accounts** + +Each row represents a connected ETD account +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Data retrieval endpoint** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. 
+ +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/saplogserv.md b/Tools/Solutions Analyzer/connector-docs/connectors/saplogserv.md index e734d3ee785..d42ad362d6a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/saplogserv.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/saplogserv.md @@ -16,4 +16,43 @@ Near Realtime Log Collection: With ability to integrate into Microsoft Sentinel LogServ complements the existing SAP application layer threat monitoring and detections in Microsoft Sentinel with the log types owned by SAP ECS as the system provider. This includes logs like: SAP Security Audit Log (AS ABAP), HANA database, AS JAVA, ICM, SAP Web Dispatcher, SAP Cloud Connector, OS, SAP Gateway, 3rd party Database, Network, DNS, Proxy, Firewall +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher. +- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal.
+
+**1. Create ARM Resources and Provide the Required Permissions**
+
+We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it.
+#### Automated deployment of Azure resources
+Clicking on "Deploy push connector resources" will trigger the creation of DCR and DCE resources.
+It will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using OAuth v2 client credentials.
+- Deploy push connector resources
+  Application: SAP LogServ push to Microsoft Sentinel
+
+**2. Maintain the data collection endpoint details and authentication info in SAP LogServ**
+
+Share the data collection endpoint URL and authentication info with the SAP LogServ administrator to configure the SAP LogServ to send data to the data collection endpoint.
+
+Learn more from [this blog series](https://community.sap.com/t5/enterprise-resource-planning-blog-posts-by-members/ultimate-blog-series-sap-logserv-integration-with-microsoft-sentinel/ba-p/14126401).
+- **Use this value to configure as Tenant ID in the LogIngestionAPI credential.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra Application Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Use this value to configure the LogsIngestionURL parameter when deploying the IFlow.**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **DCR Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/saps4publicalerts.md b/Tools/Solutions Analyzer/connector-docs/connectors/saps4publicalerts.md index 8a42141f546..6e20807b304 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/saps4publicalerts.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/saps4publicalerts.md @@ -10,4 +10,61 @@ The SAP S/4HANA Cloud Public Edition (GROW with SAP) data connector enables ingestion of SAP's security audit log into the Microsoft Sentinel Solution for SAP, supporting cross-correlation, alerting, and threat hunting. Looking for alternative authentication mechanisms? See [here](https://github.com/Azure-Samples/Sentinel-For-SAP-Community/tree/main/integration-artifacts). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Client Id and Client Secret for Audit Retrieval API**: Enable API access in BTP. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**Step 1 - Configuration steps for SAP S/4HANA Cloud Public Edition** + +To connect to SAP S/4HANA Cloud Public Edition, you will need: + +1. Configure a communication arrangement for communication scenario **[SAP_COM_0750](https://help.sap.com/docs/SAP_S4HANA_CLOUD/0f69f8fb28ac4bf48d2b57b9637e81fa/a93dca70e2ce43d19ac93e3e5531e37d.html)** + +2. SAP S/4HANA Cloud Public Edition tenant **API URL** +3. Valid **communication user (username and password)** for your SAP S/4HANA Cloud system +4. **Appropriate authorizations** to access audit log data via OData services + +>**NOTE:** This connector supports Basic authentication. Looking for alternative authentication mechanisms? See [here](https://github.com/Azure-Samples/Sentinel-For-SAP-Community/tree/main/integration-artifacts) + +**2. Connect events from SAP S/4HANA Cloud Public Edition to Microsoft Sentinel Solution for SAP** + +Connect using Basic authentication +**S/4HANA Cloud Public Edition connection** + +When you click the "Add account" button in the portal, a configuration form will open. You'll need to provide: + +*Account Details* + +- **Username** (optional): Enter your SAP S/4HANA Cloud username +- **Password** (optional): Enter your SAP S/4HANA Cloud password +- **SAP S/4HANA Cloud API URL** (optional): https://my123456-api.s4hana.cloud.sap + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + + +**3. 
S/4HANA Cloud Public Edition connections** + +Each row represents a connected S/4HANA Cloud Public Edition system +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **S/4HANA Cloud API endpoint** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/securepracticemailriskconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/securepracticemailriskconnector.md index 884e958818f..368c34f9506 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/securepracticemailriskconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/securepracticemailriskconnector.md @@ -10,4 +10,27 @@ The MailRisk by Secure Practice connector allows you to ingest email threat intelligence data from the MailRisk API into Microsoft Sentinel. This connector provides visibility into reported emails, risk assessments, and security events related to email threats. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **API credentials**: Your Secure Practice API key pair is also needed, which are created in the [settings in the admin portal](https://manage.securepractice.co/settings/security). 
Generate a new key pair with description `Microsoft Sentinel`. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Obtain Secure Practice API Credentials** + +Log in to your Secure Practice account and generate an API Key and API Secret if you haven't already. + +**2. Connect to MailRisk API** + +Enter your Secure Practice API credentials below. The credentials will be securely stored and used to authenticate API requests. +- **API Key**: Enter your Secure Practice API Key +- **API Secret**: (password field) +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/securitybridge.md b/Tools/Solutions Analyzer/connector-docs/connectors/securitybridge.md index d5944599a59..e97df500b9c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/securitybridge.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/securitybridge.md @@ -10,4 +10,45 @@ SecurityBridge enhances SAP security by integrating seamlessly with Microsoft Sentinel, enabling real-time monitoring and threat detection across SAP environments. This integration allows Security Operations Centers (SOCs) to consolidate SAP security events with other organizational data, providing a unified view of the threat landscape . Leveraging AI-powered analytics and Microsoft’s Security Copilot, SecurityBridge identifies sophisticated attack patterns and vulnerabilities within SAP applications, including ABAP code scanning and configuration assessments . The solution supports scalable deployments across complex SAP landscapes, whether on-premises, in the cloud, or hybrid environments . 
By bridging the gap between IT and SAP security teams, SecurityBridge empowers organizations to proactively detect, investigate, and respond to threats, enhancing overall security posture.
+## Permissions
+
+**Resource Provider Permissions:**
+- **Workspace** (Workspace): Read and Write permissions are required.
+- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)
+
+**Custom Permissions:**
+- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.
+- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.
+
+## Setup Instructions
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+**1. Create ARM Resources and Provide the Required Permissions**
+
+We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it.
+#### Automated deployment of Azure resources
+Clicking on "Deploy push connector resources" will trigger the creation of DCR and DCE resources.
+It will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using OAuth v2 client credentials.
+- Deploy push connector resources
+  Application: SecurityBridge Solution for SAP
+
+**2. 
Maintain the data collection endpoint details and authentication info in SecurityBridge**
+
+Share the data collection endpoint URL and authentication info with the SecurityBridge administrator to configure the SecurityBridge to send data to the data collection endpoint.
+
+Learn more from our KB Page https://abap-experts.atlassian.net/wiki/spaces/SB/pages/4099309579/REST+Push+Interface
+- **Use this value to configure as Tenant ID in the LogIngestionAPI credential.**: `TenantId`
+  > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+- **Entra Application ID**: `ApplicationId`
+  > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+- **Entra Application Secret**: `ApplicationSecret`
+  > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+- **Use this value to configure the LogsIngestionURL parameter when deploying the IFlow.**: `DataCollectionEndpoint`
+  > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+- **DCR Immutable ID**: `DataCollectionRuleId`
+  > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+- **Sentinel for SAP Stream ID**: `SAP_ABAPAUDITLOG`
+- **SecurityBridge_CL Stream ID**: `Custom-SecurityBridge_CL`
+
 [← Back to Connectors Index](../connectors-index.md)
diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/securitybridgesap.md b/Tools/Solutions Analyzer/connector-docs/connectors/securitybridgesap.md
index a475fc11a92..481814f6896 100644
--- a/Tools/Solutions Analyzer/connector-docs/connectors/securitybridgesap.md
+++ b/Tools/Solutions Analyzer/connector-docs/connectors/securitybridgesap.md
@@ -10,4 +10,75 @@
 SecurityBridge is the first and only holistic, natively integrated security platform, addressing all aspects needed to 
protect organizations running SAP from internal and external threats against their core business applications. The SecurityBridge platform is an SAP-certified add-on, used by organizations around the globe, and addresses the clients’ need for advanced cybersecurity, real-time monitoring, compliance, code security, and patching to protect against internal and external threats.This Microsoft Sentinel Solution allows you to integrate SecurityBridge Threat Detection events from all your on-premise and cloud based SAP instances into your security monitoring.Use this Microsoft Sentinel Solution to receive normalized and speaking security events, pre-built dashboards and out-of-the-box templates for your SAP security monitoring.
+## Permissions
+
+**Resource Provider Permissions:**
+- **Workspace** (Workspace): read and write permissions are required.
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).
+
+## Setup Instructions
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SecurityBridgeLogs and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App/Parsers/SecurityBridgeLogs.txt).The function usually takes 10-15 minutes to activate after solution installation/update.
+ +>**NOTE:** This data connector has been developed using SecurityBridge Application Platform 7.4.0. + +**1. Install and onboard the agent for Linux or Windows** + +This solution requires logs collection via an Microsoft Sentinel agent installation + +> The Microsoft Sentinel agent is supported on the following Operating Systems: +1. Windows Servers +2. SUSE Linux Enterprise Server +3. Redhat Linux Enterprise Server +4. Oracle Linux Enterprise Server +5. If you have the SAP solution installed on HPUX / AIX then you will need to deploy a log collector on one of the Linux options listed above and forward your logs to that collector +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. Click **+Add custom** +3. Click **Browse** to upload a sample of a SecurityBridge SAP log file (e.g. AED_20211129164544.cef). Then, click **Next >** +4. Select **New Line** as the record delimiter then click **Next >** +5. Select **Windows** or **Linux** and enter the path to SecurityBridge logs based on your configuration. 
Example: + - '/usr/sap/tmp/sb_events/*.cef' + +>**NOTE:** You can add as many paths as you want in the configuration. + +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **SecurityBridgeLogs** as the custom log Name and click **Done** + +**3. Check logs in Microsoft Sentinel** + +Open Log Analytics to check if the logs are received using the SecurityBridgeLogs_CL Custom log table. + +>**NOTE:** It may take up to 30 minutes before new logs will appear in SecurityBridgeLogs_CL table. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/securityevents.md b/Tools/Solutions Analyzer/connector-docs/connectors/securityevents.md index a54622adc33..d168e72636f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/securityevents.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/securityevents.md @@ -10,4 +10,37 @@ You can stream all security events from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220093&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Workspace data sources** (Workspace): read and write permissions. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Download and install the agent** + +> Security Events logs are collected only from **Windows** agents. +**Choose where to install the agent:** + +**Install agent on Azure Windows Virtual Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on non-Azure Windows Machine** + + Select the machine to install the agent and then click **Connect**. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Select which events to stream** + +- All events - All Windows security and AppLocker events. +- Common - A standard set of events for auditing purposes. +- Minimal - A small set of events that might indicate potential threats. By enabling this option, you won't be able to have a full audit trail. +- None - No security or AppLocker events. +- Configure SecurityEvents data connector + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/securityscorecardfactorazurefunctions.md b/Tools/Solutions Analyzer/connector-docs/connectors/securityscorecardfactorazurefunctions.md index 220b1dcec49..a1328591ac9 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/securityscorecardfactorazurefunctions.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/securityscorecardfactorazurefunctions.md @@ -10,4 +10,117 @@ SecurityScorecard is the leader in cybersecurity risk ratings. The [SecurityScorecard](https://www.SecurityScorecard.com/) Factors data connector provides the ability for Sentinel to import SecurityScorecard factor ratings as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. Maintain full awareness of any company's security posture and be able to receive timely updates when factor scores change or drop. SecurityScorecard factor ratings are updated daily based on evidence collected across the web. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **SecurityScorecard API Key** is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the SecurityScorecard API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the SecurityScorecard API** + + Follow these instructions to create/get a SecurityScorecard API token. + 1. As an administrator in SecurityScorecard, navigate to My Settings and then Users + 2. Click '+ Add User' + 3. In the form, check off 'Check to create a bot user' + 4. 
Provide a name for the Bot and provide it with Read Only permission
+ 5. Click 'Add User'
+ 6. Locate the newly created Bot user
+ 7. Click 'create token' in the Bot user's row
+ 8. Click 'Confirm' and note the API token that has been generated
+
+**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**
+
+>**IMPORTANT:** Before deploying the SecurityScorecard Factor data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the SecurityScorecard API Authorization Key(s)
+- **Workspace ID**: `WorkspaceId`
+  > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+- **Primary Key**: `PrimaryKey`
+  > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+
+**3. Option 1 - Azure Resource Manager (ARM) Template**
+
+Use this method for automated deployment of the SecurityScorecard Factor connector.
+
+1. Click the **Deploy to Azure** button below.
+
+ [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SecurityScorecardFactorAPI-azuredeploy)
+2. Select the preferred **Subscription**, **Resource Group** and **Location**.
+3. Enter the below information:
+ Function Name
+ Workspace ID
+ Workspace Key
+ SecurityScorecard API Key
+ SecurityScorecard Base URL (https://api.securityscorecard.io)
+ Domain
+ Portfolio IDs (Comma separated IDs)
+ SecurityScorecard Factor Table Name (Default: SecurityScorecardFactor)
+ Level Factor Change (Default: 7)
+ Factor Schedule (Default: 0 15 * * * *)
+ Diff Override Own Factor (Default: true)
+ Diff Override Portfolio Factor (Default: true)
+4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.
+5. Click **Purchase** to deploy.
+
+**4. 
Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the SecurityScorecard Factor data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-SecurityScorecardFactorAPI-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SecurityScorecardXXXXX). + + e. **Select a runtime:** Choose Python 3.8 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. 
Go to Azure Portal for the Function App configuration.
+
+**2. Configure the Function App**
+
+1. In the Function App, select the Function App Name and select **Configuration**.
+2. In the **Application settings** tab, select **+ New application setting**.
+3. Add each of the following application settings individually, with their respective values (case-sensitive):
+ Workspace ID
+ Workspace Key
+ SecurityScorecard API Key
+ SecurityScorecard Base URL (https://api.securityscorecard.io)
+ Domain
+ Portfolio IDs (Comma separated IDs)
+ SecurityScorecard Factor Table Name (Default: SecurityScorecardFactor)
+ Level Factor Change (Default: 7)
+ Factor Schedule (Default: 0 15 * * * *)
+ Diff Override Own Factor (Default: true)
+ Diff Override Portfolio Factor (Default: true)
+ logAnalyticsUri (optional)
+ - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://<CustomerId>.ods.opinsights.azure.us`.
+4. Once all application settings have been entered, click **Save**.
+
 [← Back to Connectors Index](../connectors-index.md)
diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/securityscorecardissueazurefunctions.md b/Tools/Solutions Analyzer/connector-docs/connectors/securityscorecardissueazurefunctions.md
index af0340a99b8..9268c40abc4 100644
--- a/Tools/Solutions Analyzer/connector-docs/connectors/securityscorecardissueazurefunctions.md
+++ b/Tools/Solutions Analyzer/connector-docs/connectors/securityscorecardissueazurefunctions.md
@@ -10,4 +10,113 @@
 SecurityScorecard is the leader in cybersecurity risk ratings. The [SecurityScorecard](https://www.SecurityScorecard.com/) Issues data connector provides the ability for Sentinel to import SecurityScorecard issue data as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. 
Maintain full awareness of any company's security posture and be able to receive timely updates when new cybersecurity issues are discovered. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **SecurityScorecard API Key** is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the SecurityScorecard API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the SecurityScorecard API** + + Follow these instructions to create/get a SecurityScorecard API token. + 1. 
As an administrator in SecurityScorecard, navigate to My Settings and then Users
+ 2. Click '+ Add User'
+ 3. In the form, check off 'Check to create a bot user'
+ 4. Provide a name for the Bot and provide it with Read Only permission
+ 5. Click 'Add User'
+ 6. Locate the newly created Bot user
+ 7. Click 'create token' in the Bot user's row
+ 8. Click 'Confirm' and note the API token that has been generated
+
+**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**
+
+>**IMPORTANT:** Before deploying the SecurityScorecard Issue data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the SecurityScorecard API Authorization Key(s)
+- **Workspace ID**: `WorkspaceId`
+  > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+- **Primary Key**: `PrimaryKey`
+  > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+
+**3. Option 1 - Azure Resource Manager (ARM) Template**
+
+Use this method for automated deployment of the SecurityScorecard Issue connector.
+
+1. Click the **Deploy to Azure** button below.
+
+ [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SecurityScorecardIssueAPI-azuredeploy)
+2. Select the preferred **Subscription**, **Resource Group** and **Location**.
+3. Enter the below information:
+ Function Name
+ Workspace ID
+ Workspace Key
+ SecurityScorecard API Key
+ SecurityScorecard Base URL (https://api.securityscorecard.io)
+ Domain
+ Portfolio IDs (Comma separated IDs)
+ SecurityScorecard Issue Table Name (Default: SecurityScorecardIssue)
+ Level Issue Change (Default: 7)
+ Issue Schedule (Default: 0 0,30 * * * *)
+4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.
+5. Click **Purchase** to deploy.
+
+**4. 
Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the SecurityScorecard Issue data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-SecurityScorecardIssueAPI-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SecurityScorecardXXXXX). + + e. **Select a runtime:** Choose Python 3.8 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. 
Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective values (case-sensitive): + Workspace ID + Workspace Key + SecurityScorecard API Key + SecurityScorecard Base URL (https://api.securityscorecard.io) + Domain + Portfolio IDs (Coma separated IDs) + SecurityScorecard Issue Table Name (Default: SecurityScorecardIssue) + Level Issue Change (Default: 7) + Issue Schedule (Default: 0 0,30 * * * *) + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/securityscorecardratingsazurefunctions.md b/Tools/Solutions Analyzer/connector-docs/connectors/securityscorecardratingsazurefunctions.md index dcdd11fd59f..e02a0765822 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/securityscorecardratingsazurefunctions.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/securityscorecardratingsazurefunctions.md @@ -10,4 +10,117 @@ SecurityScorecard is the leader in cybersecurity risk ratings. The [SecurityScorecard](https://www.SecurityScorecard.com/) data connector provides the ability for Sentinel to import SecurityScorecard ratings as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. 
Maintain full awareness of any company's security posture and be able to receive timely updates when scores change or drop. SecurityScorecard ratings are updated daily based on evidence collected across the web. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **SecurityScorecard API Key** is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the SecurityScorecard API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the SecurityScorecard API** + + Follow these instructions to create/get a SecurityScorecard API token. + 1. 
As an administrator in SecurityScorecard, navigate to My Settings and then Users + 2. Click '+ Add User' + 3. In the form, check off 'Check to create a bot user' + 4. Provide a name for the Bot and provide it with Read Only permission + 5. Click 'Add User' + 6. Locate the newly created Bot user + 7. Click 'create token' in the Bot user's row + 8. Click 'Confirm' and note the API token that has been generated + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the SecurityScorecard Ratings data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the SecurityScorecard API Authorization Key(s) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the SecurityScorecard Ratings connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SecurityScorecardRatingsAPI-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Function Name + Workspace ID + Workspace Key + SecurityScorecard API Key + SecurityScorecard Base URL (https://api.securityscorecard.io) + Domain + Portfolio IDs (Coma separated IDs) + SecurityScorecard Ratings Table Name (Default: SecurityScorecardRatings) + Level Ratings Change (Default: 7) + Ratings Schedule (Default: 0 45 * * * *) + Diff Override Own Ratings (Default: true) + Diff Override Portfolio Ratings (Default: true) +4. 
Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the SecurityScorecard Ratings data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-SecurityScorecardRatingsAPI-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SecurityScorecardXXXXX). + + e. **Select a runtime:** Choose Python 3.8 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.
+7. Go to Azure Portal for the Function App configuration.
+
+**2. Configure the Function App**
+
+1. In the Function App, select the Function App Name and select **Configuration**.
+2. In the **Application settings** tab, select **+ New application setting**.
+3. Add each of the following application settings individually, with their respective values (case-sensitive):
+		Workspace ID
+		Workspace Key
+		SecurityScorecard API Key
+		SecurityScorecard Base URL (https://api.securityscorecard.io)
+		Domain
+		Portfolio IDs (Comma separated IDs)
+		SecurityScorecard Ratings Table Name (Default: SecurityScorecardRatings)
+		Level Ratings Change (Default: 7)
+		Ratings Schedule (Default: 0 45 * * * *)
+		Diff Override Own Ratings (Default: true)
+		Diff Override Portfolio Ratings (Default: true)
+		logAnalyticsUri (optional)
+ - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.
+4. Once all application settings have been entered, click **Save**.
+
 [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/semperisdsp.md b/Tools/Solutions Analyzer/connector-docs/connectors/semperisdsp.md index 45126985c78..54a8331df53 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/semperisdsp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/semperisdsp.md @@ -12,4 +12,72 @@ Semperis Directory Services Protector data connector allows for the export of it It provides a data parser to manipulate the Windows event logs more easily. The different workbooks ease your Active Directory security monitoring and provide different ways to visualize the data.
The analytic templates allow you to automate responses regarding different events, exposures, or attacks.
+## Permissions
+
+**Resource Provider Permissions:**
+- **Workspace** (Workspace): read and write permissions are required.
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).
+
+## Setup Instructions
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**dsp_parser**](https://aka.ms/sentinel-SemperisDSP-parser) which is deployed with the Microsoft Sentinel Solution.
+
+**2. Configure Windows Security Events via AMA connector**
+
+Collect Windows security event logs from your **Semperis DSP Management Server**.
+
+**1. Install the Azure Monitor Agent (AMA)**
+
+On your **Semperis DSP Management Server** install the AMA on the DSP machine that will act as the event log forwarder.
+You can skip this step if you have already installed the Microsoft agent for Windows.
+
+**2. Create a Data Collection Rule (DCR)**
+
+Start collecting logs from the **Semperis DSP Management Server**.
+
+1. In the Azure portal, navigate to your **Log Analytics workspace**.
+2. In the left pane, click on **Configuration** and then **Data connectors**.
+3. Find and install the **Windows Security Events via AMA** connector.
+4. Click on **Open connector** and then on **Create data collection rule**.
+5. Configure the DCR with the necessary details, such as the log sources and the destination workspace.
+
**Choose where to install the agent:**
+
**Install agent on Semperis DSP Management Server**
+
+  Download the agent on the relevant machine and follow the instructions.
+ - **Install/configure: InstallAgentOnNonAzure**
+
+**3. Configure Common Event Format via AMA connector**
+
+Collect syslog messages sent from your **Semperis DSP Management Server**.
+
+**1. Install the Azure Monitor Agent (AMA)**
+
+Install the AMA on the Linux machine that will act as the log forwarder. This machine will collect and forward CEF logs to Microsoft Sentinel.
+You can skip this step if you have already installed the Microsoft agent for Linux.
+
+**2. Create a Data Collection Rule (DCR)**
+
+Start collecting logs from the **Semperis DSP Management Server**.
+
+1. In the Azure portal, navigate to your **Log Analytics workspace**.
+2. In the left pane, click on **Configuration** and then **Data connectors**.
+3. Find and install the **Common Event Format via AMA** connector.
+4. Click on **Open connector** and then on **Create data collection rule**.
+5. Configure the DCR with the necessary details, such as the log sources and the destination workspace.
+**Choose where to install the agent:**
+
+**Install agent on Semperis DSP Management Server**
+
+  Download the agent on the relevant machine and follow the instructions.
+ - **Install/configure: InstallAgentOnNonAzure**
+
+**3. Configure sending CEF logs on your Semperis DSP Management Server**
+
+Configure your **Semperis DSP Management Server** to send CEF logs to the Linux machine where the AMA is installed. This involves setting the destination IP address and port for the CEF logs.
+
+> You should now be able to receive logs in the *Windows event log* table and *common log* table, log data can be parsed using the **dsp_parser()** function, used by all query samples, workbooks and analytic templates.
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/senservapro.md b/Tools/Solutions Analyzer/connector-docs/connectors/senservapro.md index eebec67ec60..51fd60d31dd 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/senservapro.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/senservapro.md @@ -10,4 +10,22 @@ The SenservaPro data connector provides a viewing experience for your SenservaPro scanning logs. View dashboards of your data, use queries to hunt & explore, and create custom alerts. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Setup the data connection** + +Visit [Senserva Setup](https://www.senserva.com/senserva-microsoft-sentinel-edition-setup/) for information on setting up the Senserva data connection, support, or any other questions. The Senserva installation will configure a Log Analytics Workspace for output. 
Deploy Microsoft Sentinel onto the configured Log Analytics Workspace to finish the data connection setup by following [this onboarding guide.](https://docs.microsoft.com/azure/sentinel/quickstart-onboard) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/sentinelone.md b/Tools/Solutions Analyzer/connector-docs/connectors/sentinelone.md index 4751d8204f8..327cc01f78e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/sentinelone.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/sentinelone.md @@ -10,4 +10,108 @@ The [SentinelOne](https://www.sentinelone.com/) data connector provides the capability to ingest common SentinelOne server objects such as Threats, Agents, Applications, Activities, Policies, Groups, and more events into Microsoft Sentinel through the REST API. Refer to API documentation: `https://.sentinelone.net/api-doc/overview` for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **SentinelOneAPIToken** is required. See the documentation to learn more about API on the `https://.sentinelone.net/api-doc/overview`. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the SentinelOne API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SentinelOne and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Parsers/SentinelOne.txt). The function usually takes 10-15 minutes to activate after solution installation/update. + +**STEP 1 - Configuration steps for the SentinelOne API** + + Follow the instructions to obtain the credentials. + +1. Log in to the SentinelOne Management Console with Admin user credentials. +2. In the Management Console, click **Settings**. +3. In the **SETTINGS** view, click **USERS** +4. 
Click **New User**.
+5. Enter the information for the new console user.
+6. In Role, select **Admin**.
+7. Click **SAVE**.
+8. Save credentials of the new user for use in the data connector.
+
+**NOTE :-** Admin access can be delegated using custom roles. Please review SentinelOne [documentation](https://www.sentinelone.com/blog/feature-spotlight-fully-custom-role-based-access-control/) to learn more about custom RBAC.
+
+**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**
+
+>**IMPORTANT:** Before deploying the SentinelOne data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following).
+- **Workspace ID**: `WorkspaceId`
+  > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+- **Primary Key**: `PrimaryKey`
+  > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+
+**4. Option 1 - Azure Resource Manager (ARM) Template**
+
+Use this method for automated deployment of the SentinelOne Audit data connector using an ARM Template.
+
+1. Click the **Deploy to Azure** button below.
+
+	[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SentinelOneAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-SentinelOneAPI-azuredeploy-gov)
+2. Select the preferred **Subscription**, **Resource Group** and **Location**.
+> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select an existing resource group without Windows apps in it or create a new resource group.
+3. Enter the **SentinelOneAPIToken**, **SentinelOneUrl** `(https://.sentinelone.net)` and deploy.
+4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.
+5. Click **Purchase** to deploy.
+
+**5. 
Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the SentinelOne Reports data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-SentinelOneAPI-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SOneXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. 
Configure the Function App**
+
+ 1. In the Function App, select the Function App Name and select **Configuration**.
+
+ 2. In the **Application settings** tab, select **+ New application setting**.
+
+ 3. Add each of the following application settings individually, with their respective string values (case-sensitive):
+		SentinelOneAPIToken
+		SentinelOneUrl
+		WorkspaceID
+		WorkspaceKey
+		logAnalyticsUri (optional)
+
+> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.
+
+ 4. Once all application settings have been entered, click **Save**.
+
 [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/sentineloneccp.md b/Tools/Solutions Analyzer/connector-docs/connectors/sentineloneccp.md index 06c2ba35836..7274898d731 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/sentineloneccp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/sentineloneccp.md @@ -10,4 +10,29 @@ The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance. +## Permissions
+
+**Resource Provider Permissions:**
+- **Workspace** (Workspace): Read and Write permissions are required.
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### Configuration steps for the SentinelOne API + Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key. +#### 1. Retrieve SentinelOne Management URL + 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials + 1.2. In the [**Management Console**] copy the URL link above without the URL path. +#### 2. Retrieve API Token + 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials + 2.2. In the [**Management Console**], click [**Settings**] + 2.3. In [**Settings**] view click on [**USERS**]. + 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**]. + 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**]. + 2.6. 
Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**] +- **SentinelOne Management URL**: https://example.sentinelone.net/ +- **API Token**: API Token +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/seraphicwebsecurity.md b/Tools/Solutions Analyzer/connector-docs/connectors/seraphicwebsecurity.md index f000162388c..2f960f62773 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/seraphicwebsecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/seraphicwebsecurity.md @@ -10,4 +10,21 @@ The Seraphic Web Security data connector provides the capability to ingest [Seraphic Web Security](https://seraphicsecurity.com/) events and alerts into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Seraphic API key**: API key for Microsoft Sentinel connected to your Seraphic Web Security tenant. To get this API key for your tenant - [read this documentation](https://constellation.seraphicsecurity.com/integrations/microsoft_sentinel/Guidance/MicrosoftSentinel-IntegrationGuide-230822.pdf). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Seraphic Web Security** + +Please insert the integration name, the Seraphic integration URL and your workspace name for Microsoft Sentinel: +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. 
+
 [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/sevcodevices.md b/Tools/Solutions Analyzer/connector-docs/connectors/sevcodevices.md index f7203cea44a..36a12a2813e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/sevcodevices.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/sevcodevices.md @@ -14,4 +14,24 @@ The Sevco Platform - Devices connector allows you to easily connect your Sevco D [For more information >​](https://docs.sev.co/docs/microsoft-sentinel-inventory)
+## Permissions
+
+**Resource Provider Permissions:**
+- **Workspace** (Workspace): read and write permissions are required.
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).
+
+## Setup Instructions
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+**1. Configure and connect to Sevco**
+
+The Sevco Platform can integrate with and export assets directly to Microsoft Sentinel.
+
+1. Go to [Sevco - Microsoft Sentinel Integration](https://docs.sev.co/docs/microsoft-sentinel-inventory), and follow the instructions, using the parameters below to set up the connection:
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/silverfortama.md b/Tools/Solutions Analyzer/connector-docs/connectors/silverfortama.md index 910252fd761..4e5b461518a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/silverfortama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/silverfortama.md @@ -14,4 +14,57 @@ The [Silverfort](https://silverfort.com) ITDR Admin Console connector solution a Please contact Silverfort or consult the Silverfort documentation for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. 
+ +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. 
You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/slackaudit.md b/Tools/Solutions Analyzer/connector-docs/connectors/slackaudit.md index 5f4786adf6e..5a8c51668d8 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/slackaudit.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/slackaudit.md @@ -10,4 +10,22 @@ The [Slack](https://slack.com) data connector provides the capability to ingest [Slack Audit Records](https://api.slack.com/admins/audit-logs) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs#the_audit_event) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. This data connector uses Microsoft Sentinel native polling capability. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Slack API credentials**: **SlackAPIBearerToken** is required for REST API. [See the documentation to learn more about API](https://api.slack.com/web#authentication). 
Check all [requirements and follow the instructions](https://api.slack.com/web#authentication) for obtaining credentials. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Slack to Microsoft Sentinel** + +Enable Slack audit Logs. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/slackauditapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/slackauditapi.md index 2791b636f56..039bc121790 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/slackauditapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/slackauditapi.md @@ -10,4 +10,71 @@ The [Slack](https://slack.com) Audit data connector provides the capability to ingest [Slack Audit Records](https://api.slack.com/admins/audit-logs) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs#the_audit_event) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **SlackAPIBearerToken** is required for REST API. [See the documentation to learn more about API](https://api.slack.com/web#authentication). Check all [requirements and follow the instructions](https://api.slack.com/web#authentication) for obtaining credentials. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Slack REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
[Follow these steps](https://aka.ms/sentinel-SlackAuditAPI-parser) to create the Kusto functions alias, **SlackAudit** + +**STEP 1 - Configuration steps for the Slack API** + + [Follow the instructions](https://api.slack.com/web#authentication) to obtain the credentials. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Slack Audit data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Slack Audit data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SlackAuditAPI-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **SlackAPIBearerToken** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Slack Audit data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the [Azure Function App](https://aka.ms/sentinel-SlackAuditAPI-functionapp) file. 
Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select ** New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + SlackAPIBearerToken + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +3. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/slackauditlogsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/slackauditlogsccpdefinition.md index e2d0b5fbfff..3800d9f82c9 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/slackauditlogsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/slackauditlogsccpdefinition.md @@ -10,4 +10,46 @@ The SlackAudit data connector provides the capability to ingest [Slack Audit logs](https://api.slack.com/admins/audit-logs) into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs-call) for more information. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **UserName, SlackAudit API Key & Action Type**: To Generate the Access Token, create a new application in Slack, then add necessary scopes and configure the redirect URL. For detailed instructions on generating the access token, user name and action name limit, refer the [link](https://github.com/v-gsrihitha/v-gsrihitha/blob/main/SlackAudit/Readme.md). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect SlackAudit to Microsoft Sentinel** + +To ingest data from SlackAudit to Microsoft Sentinel, you have to click on Add Domain button below then you get a pop up to fill the details, provide the required information and click on Connect. You can see the usernames, actions connected in the grid. +> +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **UserName** +- **Actions** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add domain** + +*Add domain* + +When you click the "Add domain" button in the portal, a configuration form will open. 
You'll need to provide: + +- **UserName** (optional): Enter your User Name +- **SlackAudit API Key** (optional): Enter your API KEY +- **SlackAudit Action Type** (optional): Enter the Action Type + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/snowflakedataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/snowflakedataconnector.md index 6f2808ae74c..b058afa8ab1 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/snowflakedataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/snowflakedataconnector.md @@ -14,4 +14,105 @@ The Snowflake data connector provides the capability to ingest Snowflake [login

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Snowflake Credentials**: **Snowflake Account Identifier**, **Snowflake User** and **Snowflake Password** are required for connection. See the documentation to learn more about [Snowflake Account Identifier](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html#). Instructions on how to create user for this connector you can find below. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Snowflake**](https://aka.ms/sentinel-SnowflakeDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Creating user in Snowflake** + +To query data from Snowflake you need a user that is assigned to a role with sufficient privileges and a virtual warehouse cluster. The initial size of this cluster will be set to small but if it is insufficient, the cluster size can be increased as necessary. + +1. Enter the Snowflake console. +2. Switch role to SECURITYADMIN and [create a new role](https://docs.snowflake.com/en/sql-reference/sql/create-role.html): +``` +USE ROLE SECURITYADMIN; +CREATE OR REPLACE ROLE EXAMPLE_ROLE_NAME;``` +3. Switch role to SYSADMIN and [create warehouse](https://docs.snowflake.com/en/sql-reference/sql/create-warehouse.html) and [grand access](https://docs.snowflake.com/en/sql-reference/sql/grant-privilege.html) to it: +``` +USE ROLE SYSADMIN; +CREATE OR REPLACE WAREHOUSE EXAMPLE_WAREHOUSE_NAME + WAREHOUSE_SIZE = 'SMALL' + AUTO_SUSPEND = 5 + AUTO_RESUME = true + INITIALLY_SUSPENDED = true; +GRANT USAGE, OPERATE ON WAREHOUSE EXAMPLE_WAREHOUSE_NAME TO ROLE EXAMPLE_ROLE_NAME;``` +4. Switch role to SECURITYADMIN and [create a new user](https://docs.snowflake.com/en/sql-reference/sql/create-user.html): +``` +USE ROLE SECURITYADMIN; +CREATE OR REPLACE USER EXAMPLE_USER_NAME + PASSWORD = 'example_password' + DEFAULT_ROLE = EXAMPLE_ROLE_NAME + DEFAULT_WAREHOUSE = EXAMPLE_WAREHOUSE_NAME +;``` +5. Switch role to ACCOUNTADMIN and [grant access to snowflake database](https://docs.snowflake.com/en/sql-reference/account-usage.html#enabling-account-usage-for-other-roles) for role. 
+``` +USE ROLE ACCOUNTADMIN; +GRANT IMPORTED PRIVILEGES ON DATABASE SNOWFLAKE TO ROLE EXAMPLE_ROLE_NAME;``` +6. Switch role to SECURITYADMIN and [assign role](https://docs.snowflake.com/en/sql-reference/sql/grant-role.html) to user: +``` +USE ROLE SECURITYADMIN; +GRANT ROLE EXAMPLE_ROLE_NAME TO USER EXAMPLE_USER_NAME;``` + +>**IMPORTANT:** Save user and API password created during this step as they will be used during deployment step. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Snowflake credentials, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SnowflakeDataConnector-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Snowflake Account Identifier**, **Snowflake User**, **Snowflake Password**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. 
Download the [Azure Function App](https://aka.ms/sentinel-SnowflakeDataConnector-functionapp) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + SNOWFLAKE_ACCOUNT + SNOWFLAKE_USER + SNOWFLAKE_PASSWORD + WORKSPACE_ID + SHARED_KEY + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/snowflakelogsccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/snowflakelogsccpdefinition.md index d9da95099c1..734454229c3 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/snowflakelogsccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/snowflakelogsccpdefinition.md @@ -10,4 +10,46 @@ The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Snowflake to Microsoft Sentinel** +>**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function +To gather data from Snowflake, you need to provide the following resources +#### 1. Account Identifier + To gather data from Snowflake, you'll need Snowflake Account Identifier. +#### 2. Programmatic Access Token + To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token +For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md). +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Account-Identifier** +- **Table Name** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add Account** + +*Add Account* + +When you click the "Add Account" button in the portal, a configuration form will open. You'll need to provide: + +- **Snowflake Account Identifier** (required): Enter Snowflake Account Identifier +- **Snowflake PAT** (required): Enter Snowflake PAT + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/socprimeauditlogsdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/socprimeauditlogsdataconnector.md new file mode 100644 index 00000000000..b89c6e20915 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/connectors/socprimeauditlogsdataconnector.md @@ -0,0 +1,34 @@ +# SOC Prime Platform Audit Logs Data Connector + +| | | +|----------|-------| +| **Connector ID** | `SOCPrimeAuditLogsDataConnector` | +| **Publisher** | Microsoft | +| **Tables Ingested** | [`SOCPrimeAuditLogs_CL`](../tables-index.md#socprimeauditlogs_cl) | +| **Used in Solutions** | [SOC Prime CCF](../solutions/soc-prime-ccf.md) | +| **Connector Definition Files** | [SOCPrime_DataConnectorDefinition.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC%20Prime%20CCF/Data%20Connectors/SOCPrime_ccp/SOCPrime_DataConnectorDefinition.json) | + +The [SOC Prime Audit Logs](https://help.socprime.com/en/articles/6265791-api) data connector allows ingesting logs from the SOC Prime Platform API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SOC Prime Platform API to fetch SOC Prime platform audit logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table, thus resulting in better performance. + +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +#### Configuration steps for the SOC Prime Platform API + Follow the instructions to obtain the credentials. you can also follow this [guide](https://help.socprime.com/en/articles/6265791-api#h_8a0d20b204) to generate personal API key. +#### Retrieve API Key + 1. Log in to the SOC Prime Platform + 2. Click [**Account**] icon -> [**Platform Settings**] -> [**API**] + 3. Click [**Add New Key**] + 4. In the modal that appears give your key a meaningful name, set expiration date and product APIs the key provides access to + 5. Click on [**Generate**] + 6. Copy the key and save it in a safe place. You won't be able to view it again once you close this modal +- **SOC Prime API Key**: (password field) +- Click 'Connect' to establish connection + +[← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/sonicwallfirewall.md b/Tools/Solutions Analyzer/connector-docs/connectors/sonicwallfirewall.md index 825e244e559..709cf557d2d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/sonicwallfirewall.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/sonicwallfirewall.md @@ -10,4 +10,59 @@ Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by SonicWall to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward SonicWall Firewall Common Event Format (CEF) logs to Syslog agent** + +Set your SonicWall Firewall to send Syslog messages in CEF format to the proxy machine. Make sure you send the logs to port 514 TCP on the machine's IP address. + + Follow Instructions . Then Make sure you select local use 4 as the facility. Then select ArcSight as the Syslog format. + +**3. 
Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/sonicwallfirewallama.md b/Tools/Solutions Analyzer/connector-docs/connectors/sonicwallfirewallama.md index 03852f1d47e..f139da3084e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/sonicwallfirewallama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/sonicwallfirewallama.md @@ -10,4 +10,60 @@ Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by SonicWall to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward SonicWall Firewall Common Event Format (CEF) logs to Syslog agent** + + Set your SonicWall Firewall to send Syslog messages in CEF format to the proxy machine. Make sure you send the logs to port 514 TCP on the machine's IP address. + + Follow Instructions . Then Make sure you select local use 4 as the facility. Then select ArcSight as the Syslog format. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace.
+ +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/sonraidataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/sonraidataconnector.md index 9aa8a4dd3d7..53f3ec76ea4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/sonraidataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/sonraidataconnector.md @@ -10,4 +10,25 @@ Use this data connector to integrate with Sonrai Security and get Sonrai tickets sent directly to Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Sonrai Security Data Connector** + +1. Navigate to Sonrai Security dashboard. +2. 
On the bottom left panel, click on integrations. +3. Select Microsoft Sentinel from the list of available Integrations. +4. Fill in the form using the information provided below. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/sophoscloudoptix.md b/Tools/Solutions Analyzer/connector-docs/connectors/sophoscloudoptix.md index 99f4fdf6a73..292295fe03b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/sophoscloudoptix.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/sophoscloudoptix.md @@ -10,4 +10,34 @@ The [Sophos Cloud Optix](https://www.sophos.com/products/cloud-optix.aspx) connector allows you to easily connect your Sophos Cloud Optix logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's cloud security and compliance posture and improves your cloud security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Get the Workspace ID and the Primary Key** + +Copy the Workspace ID and Primary Key for your workspace. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. Configure the Sophos Cloud Optix Integration** + +In Sophos Cloud Optix go to [Settings->Integrations->Microsoft Sentinel](https://optix.sophos.com/#/integrations/sentinel) and enter the Workspace ID and Primary Key copied in Step 1. + +**3. Select Alert Levels** + +In Alert Levels, select which Sophos Cloud Optix alerts you want to send to Microsoft Sentinel. + +**4. Turn on the integration** + +To turn on the integration, select Enable, and then click Save. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/sophosendpointprotectionccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/sophosendpointprotectionccpdefinition.md index 1ba5ed0737f..0a3c4134f07 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/sophosendpointprotectionccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/sophosendpointprotectionccpdefinition.md @@ -10,4 +10,39 @@ The [Sophos Endpoint Protection](https://www.sophos.com/en-us/products/endpoint-antivirus.aspx) data connector provides the capability to ingest [Sophos events](https://developer.sophos.com/docs/siem-v1/1/routes/events/get) and [Sophos alerts](https://developer.sophos.com/docs/siem-v1/1/routes/alerts/get) into Microsoft Sentinel. Refer to [Sophos Central Admin documentation](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/Logs.html) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+ +**Custom Permissions:** +- **Sophos Endpoint Protection API access**: Access to the Sophos Endpoint Protection API through a service principal is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect to Sophos Endpoint Protection API to start collecting event and alert logs in Microsoft Sentinel** + +Follow [Sophos instructions](https://developer.sophos.com/getting-started-tenant) to create a service principal with access to the Sophos API. It will need the Service Principal ReadOnly role. + Through those instructions, you should get the Client ID, Client Secret, Tenant ID and data region. + Fill in the form below with that information. +- **Sophos Tenant ID**: Sophos Tenant ID +- **Sophos Tenant Data Region**: eu01, eu02, us01, us02 or us03 +- **OAuth Configuration**: + - Client ID + - Client Secret + - Click 'Connect' to authenticate +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Name** +- **ID** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation.
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/sophosep.md b/Tools/Solutions Analyzer/connector-docs/connectors/sophosep.md index 608fc8163ac..1b9f8da9c2d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/sophosep.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/sophosep.md @@ -10,4 +10,78 @@ The [Sophos Endpoint Protection](https://www.sophos.com/en-us/products/endpoint-antivirus.aspx) data connector provides the capability to ingest [Sophos events](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/common/concepts/Events.html) into Microsoft Sentinel. Refer to [Sophos Central Admin documentation](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/Logs.html) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **API token** is required. [See the documentation to learn more about API token](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/ep_ApiTokenManagement.html) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This connector uses Azure Functions to connect to the Sophos Central APIs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**SophosEPEvent**](https://aka.ms/sentinel-SophosEP-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuration steps for the Sophos Central API** + + Follow the instructions to obtain the credentials. + +1. In Sophos Central Admin, go to **Global Settings > API Token Management**. +2. To create a new token, click **Add token** from the top-right corner of the screen. +3. Select a **token name** and click **Save**. The **API Token Summary** for this token is displayed. +4. Click **Copy** to copy your **API Access URL + Headers** from the **API Token Summary** section into your clipboard. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Sophos Endpoint Protection data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Sophos Endpoint Protection data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SophosEP-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **Sophos API Access URL and Headers**, **AzureSentinelWorkspaceId**, **AzureSentinelSharedKey**. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Sophos Endpoint Protection data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-SophosEP-functionapp) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3.
After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + SOPHOS_TOKEN + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/sophosxgfirewall.md b/Tools/Solutions Analyzer/connector-docs/connectors/sophosxgfirewall.md index d6b32eee30a..211e0bf1673 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/sophosxgfirewall.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/sophosxgfirewall.md @@ -10,4 +10,47 @@ The [Sophos XG Firewall](https://www.sophos.com/products/next-gen-firewall.aspx) allows you to easily connect your Sophos XG Firewall logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Sophos XG Firewall with Microsoft Sentinel provides more visibility into your organization's firewall traffic and will enhance security monitoring capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. 
+ +**Custom Permissions:** +- **Sophos XG Firewall**: must be configured to export logs via Syslog + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Sophos XG Firewall and load the function code or click [here](https://aka.ms/sentinel-SophosXG-parser), on the second line of the query, enter the hostname(s) of your Sophos XG Firewall device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. 
Configure and connect the Sophos XG Firewall** + +[Follow these instructions](https://doc.sophos.com/nsg/sophos-firewall/20.0/Help/en-us/webhelp/onlinehelp/AdministratorHelp/SystemServices/LogSettings/SyslogServerAdd/index.html) to enable syslog streaming. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/squadratechnologiessecrmm.md b/Tools/Solutions Analyzer/connector-docs/connectors/squadratechnologiessecrmm.md index 558fb60e449..297d709c32f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/squadratechnologiessecrmm.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/squadratechnologiessecrmm.md @@ -10,4 +10,20 @@ Use the Squadra Technologies secRMM Data Connector to push USB removable storage security event data into Microsoft Sentinel Log Analytics. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +Follow the step-by-step instructions provided in the [Squadra Technologies configuration guide for Azure Sentinel](https://www.squadratechnologies.com/StaticContent/ProductDownload/secRMM/9.11.0.0/secRMMAzureSentinelAdministratorGuide.pdf) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/squidproxy.md b/Tools/Solutions Analyzer/connector-docs/connectors/squidproxy.md index 1fdccbd10ed..b430b937952 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/squidproxy.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/squidproxy.md @@ -10,4 +10,60 @@ The [Squid Proxy](http://www.squid-cache.org/) connector allows you to easily connect your Squid Proxy logs with Microsoft Sentinel. This gives you more insight into your organization's network proxy traffic and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Squid Proxy and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SquidProxy/Parsers/SquidProxy.txt), on the second line of the query, enter the hostname(s) of your SquidProxy device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Squid Proxy server where the logs are generated. + +> Logs from Squid Proxy deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. From the left pane, select **Data**, select **Custom Logs** and click **Add+** +3. Click **Browse** to upload a sample of a Squid Proxy log file(e.g. access.log or cache.log). Then, click **Next >** +4. 
Select **New line** as the record delimiter and click **Next >** +5. Select **Windows** or **Linux** and enter the path to Squid Proxy logs. Default paths are: + - **Windows** directory: `C:\Squid\var\log\squid\*.log` + - **Linux** Directory: `/var/log/squid/*.log` +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **SquidProxy_CL** as the custom log Name and click **Done** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ssg.md b/Tools/Solutions Analyzer/connector-docs/connectors/ssg.md index 4699c53c257..6510305685f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ssg.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ssg.md @@ -10,4 +10,25 @@ The SINEC Security Guard solution for Microsoft Sentinel allows you to ingest security events of your industrial networks from the [SINEC Security Guard](https://siemens.com/sinec-security-guard) into Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +This Data Connector relies on the SINEC Security Guard Sensor Package to be able to receive Sensor events in Microsoft Sentinel. The Sensor Package can be purchased in the Siemens Xcelerator Marketplace. +**1. Please follow the steps to configure the data connector** + +**Set up the SINEC Security Guard Sensor** + + Detailed step for setting up the sensor. 
+ + **Create the Data Connector and configure it in the SINEC Security Guard web interface** + + Instructions on configuring the data connector. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/stealthwatch.md b/Tools/Solutions Analyzer/connector-docs/connectors/stealthwatch.md index 14b2b6d63a5..f7e051a1927 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/stealthwatch.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/stealthwatch.md @@ -10,4 +10,62 @@ The [Cisco Secure Cloud Analytics](https://www.cisco.com/c/en/us/products/security/stealthwatch/index.html) data connector provides the capability to ingest [Cisco Secure Cloud Analytics events](https://www.cisco.com/c/dam/en/us/td/docs/security/stealthwatch/management_console/securit_events_alarm_categories/7_4_2_Security_Events_and_Alarm_Categories_DV_2_1.pdf) into Microsoft Sentinel. Refer to [Cisco Secure Cloud Analytics documentation](https://www.cisco.com/c/dam/en/us/td/docs/security/stealthwatch/system_installation_configuration/7_5_0_System_Configuration_Guide_DV_1_3.pdf) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**StealthwatchEvent**](https://aka.ms/sentinel-stealthwatch-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using Cisco Secure Cloud Analytics version 7.3.2 + +**1. 
Install and onboard the agent for Linux or Windows** + +Install the agent on the Server where the Cisco Secure Cloud Analytics logs are forwarded. + +> Logs from Cisco Secure Cloud Analytics Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure Cisco Secure Cloud Analytics event forwarding** + +Follow the configuration steps below to get Cisco Secure Cloud Analytics logs into Microsoft Sentinel. +1. Log in to the Stealthwatch Management Console (SMC) as an administrator. +2. In the menu bar, click **Configuration** **>** **Response Management**. +3. From the **Actions** section in the **Response Management** menu, click **Add > Syslog Message**. +4. In the Add Syslog Message Action window, configure parameters. +5. 
Enter the following custom format: +|Lancope|Stealthwatch|7.3|{alarm_type_id}|0x7C|src={source_ip}|dst={target_ip}|dstPort={port}|proto={protocol}|msg={alarm_type_description}|fullmessage={details}|start={start_active_time}|end={end_active_time}|cat={alarm_category_name}|alarmID={alarm_id}|sourceHG={source_host_group_names}|targetHG={target_host_group_names}|sourceHostSnapshot={source_url}|targetHostSnapshot={target_url}|flowCollectorName={device_name}|flowCollectorIP={device_ip}|domain={domain_name}|exporterName={exporter_hostname}|exporterIPAddress={exporter_ip}|exporterInfo={exporter_label}|targetUser={target_username}|targetHostname={target_hostname}|sourceUser={source_username}|alarmStatus={alarm_status}|alarmSev={alarm_severity_name} + +6. Select the custom format from the list and click **OK** +7. Click **Response Management > Rules**. +8. Click **Add** and select **Host Alarm**. +9. Provide a rule name in the **Name** field. +10. Create rules by selecting values from the Type and Options menus. To add more rules, click the ellipsis icon. For a Host Alarm, combine as many possible types in a statement as possible. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/styxviewendpointconnectordefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/styxviewendpointconnectordefinition.md index 63d9a9e44d2..ff7f608f11d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/styxviewendpointconnectordefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/styxviewendpointconnectordefinition.md @@ -10,4 +10,22 @@ The [StyxView Alerts](https://styxintel.com/) data connector enables seamless integration between the StyxView Alerts platform and Microsoft Sentinel. This connector ingests alert data from the StyxView Alerts API, allowing organizations to centralize and correlate actionable threat intelligence directly within their Microsoft Sentinel workspace. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **StyxView Alert API access**: Access to the StyxView Alerts API through an API key is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect to StyxView Alerts API to start collecting alert logs in Microsoft Sentinel** + +Contact Styx Intelligence Support (support.team@styxintel.com) to get access to an API key. +- **API Token**: (password field) +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/symantec.md b/Tools/Solutions Analyzer/connector-docs/connectors/symantec.md index fc778041450..fa45b812fa5 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/symantec.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/symantec.md @@ -10,4 +10,29 @@ Symantec ICDx connector allows you to easily connect your Symantec security solutions logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Keys** (Workspace): read permissions to shared keys for the workspace. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure and connect Symantec ICDx** + +1. On the ICDx navigation bar, click **Configuration**. +2. At the top of the **Configuration** screen, click **Forwarders**, and next to Microsoft Sentinel (Log Analytics), click **Add**. +3. In the Microsoft Sentinel (Log Analytics) window that opens, click **Show Advanced**. [See the documentation to set advanced features](https://aka.ms/SymantecICDX-learn-more). +4. Make sure that you set a name for the forwarder and under Azure Destination, set these required fields: + - Workspace ID: Paste the Workspace ID from the Microsoft Sentinel portal connector page. + - Primary Key: Paste the Primary Key from the Microsoft Sentinel portal connector page. + - Custom Log Name: Type the custom log name in the Microsoft Azure portal Log Analytics workspace to which you are going to forward events. The default is SymantecICDx. +5. Click Save and to start the forwarder, go to Options > More and click **Start**. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/symantecendpointprotection.md b/Tools/Solutions Analyzer/connector-docs/connectors/symantecendpointprotection.md index 32ea57799f6..522b5ca3410 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/symantecendpointprotection.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/symantecendpointprotection.md @@ -10,4 +10,47 @@ The [Broadcom Symantec Endpoint Protection (SEP)](https://www.broadcom.com/products/cyber-security/endpoint/end-user/enterprise) connector allows you to easily connect your SEP logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **Symantec Endpoint Protection (SEP)**: must be configured to export logs via Syslog + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Symantec Endpoint Protection and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Endpoint%20Protection/Parsers/SymantecEndpointProtection.yaml), on the second line of the query, enter the hostname(s) of your SymantecEndpointProtection device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the Symantec Endpoint Protection** + +[Follow these instructions](https://techdocs.broadcom.com/us/en/symantec-security-software/endpoint-security-and-management/endpoint-protection/all/Monitoring-Reporting-and-Enforcing-Compliance/viewing-logs-v7522439-d37e464/exporting-data-to-a-syslog-server-v8442743-d15e1107.html) to configure the Symantec Endpoint Protection to forward syslog. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/symantecproxysg.md b/Tools/Solutions Analyzer/connector-docs/connectors/symantecproxysg.md index 120a99c79ff..f9f5ce706a9 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/symantecproxysg.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/symantecproxysg.md @@ -10,4 +10,55 @@ The [Symantec ProxySG](https://www.broadcom.com/products/cyber-security/network/gateway/proxy-sg-and-advanced-secure-gateway) allows you to easily connect your Symantec ProxySG logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Symantec ProxySG with Microsoft Sentinel provides more visibility into your organization's network proxy traffic and will enhance security monitoring capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **Symantec ProxySG**: must be configured to export logs via Syslog + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Symantec Proxy SG and load the function code or click [here](https://aka.ms/sentinel-SymantecProxySG-parser), on the second line of the query, enter the hostname(s) of your Symantec Proxy SG device(s) and any other unique identifiers for the logstream. 
The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the Symantec ProxySG** + +1. Log in to the Blue Coat Management Console . + 2. Select Configuration > Access Logging > Formats. + 3. Select New. + 4. Enter a unique name in the Format Name field. + 5. Click the radio button for **Custom format string** and paste the following string into the field. +

1 $(date) $(time) $(time-taken) $(c-ip) $(cs-userdn) $(cs-auth-groups) $(x-exception-id) $(sc-filter-result) $(cs-categories) $(quot)$(cs(Referer))$(quot) $(sc-status) $(s-action) $(cs-method) $(quot)$(rs(Content-Type))$(quot) $(cs-uri-scheme) $(cs-host) $(cs-uri-port) $(cs-uri-path) $(cs-uri-query) $(cs-uri-extension) $(quot)$(cs(User-Agent))$(quot) $(s-ip) $(sr-bytes) $(rs-bytes) $(x-virus-id) $(x-bluecoat-application-name) $(x-bluecoat-application-operation) $(cs-uri-port) $(x-cs-client-ip-country) $(cs-threat-risk)

+ 6. Click the **OK** button. + 7. Click the **Apply** button. + 8. [Follow these instructions](https://knowledge.broadcom.com/external/article/166529/sending-access-logs-to-a-syslog-server.html) to enable syslog streaming of **Access** Logs. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/symantecvip.md b/Tools/Solutions Analyzer/connector-docs/connectors/symantecvip.md index 83140296500..add60714e5f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/symantecvip.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/symantecvip.md @@ -10,4 +10,47 @@ The [Symantec VIP](https://vip.symantec.com/) connector allows you to easily connect your Symantec VIP logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **Symantec VIP**: must be configured to export logs via Syslog + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Symantec VIP and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20VIP/Parsers/SymantecVIP.yaml), on the second line of the query, enter the hostname(s) of your Symantec VIP device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the Symantec VIP** + +[Follow these instructions](https://aka.ms/sentinel-symantecvip-configurationsteps) to configure the Symantec VIP Enterprise Gateway to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/syslog.md b/Tools/Solutions Analyzer/connector-docs/connectors/syslog.md index 66cbab6fb3b..fb59e6d4538 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/syslog.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/syslog.md @@ -14,4 +14,40 @@ Syslog is an event logging protocol that is common to Linux. Applications will s [Learn more >](https://aka.ms/sysLogInfo) +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Install and onboard the agent for Linux** + +You can collect Syslog events from your local machine by installing the agent on it. You can also collect Syslog generated on a different source by running the installation script below on the local machine, where the agent is installed. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Select the link below to open your workspace **agents configuration**, and select the **Syslog** tab. +2. Select **Add facility** and choose from the drop-down list of facilities. Repeat for all the facilities you want to add. +3. 
Mark the check boxes for the desired severities for each facility. +4. Click **Apply**. +- **Open Syslog settings** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/syslogama.md b/Tools/Solutions Analyzer/connector-docs/connectors/syslogama.md index 59c399cf3c7..64374945365 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/syslogama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/syslogama.md @@ -14,4 +14,28 @@ Syslog is an event logging protocol that is common to Linux. Applications will s [Learn more >](https://aka.ms/sysLogInfo) +## Permissions + +**Resource Provider Permissions:** +- **Workspace data sources** (Workspace): read and write permissions. + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Enable data collection rule​** + +You can collect Syslog events from your local machine by installing the agent on it. You can also collect Syslog generated on a different source by running the installation script below on the local machine, where the agent is installed. + +> Syslog logs are collected only from **Linux** agents. +- Configure SysLogAma data connector + +- **Create data collection rule** + +**2. Run the following command to install and apply the Syslog collector:** + +> To collect logs generated on a different machine run this script on the machine where the agent is installed. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/talonlogs.md b/Tools/Solutions Analyzer/connector-docs/connectors/talonlogs.md index da123c54dd0..72c9a098348 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/talonlogs.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/talonlogs.md @@ -10,4 +10,20 @@ The Talon Security Logs connector allows you to easily connect your Talon events and audit logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Please note the values below and follow the instructions here to connect your Talon Security events and audit logs with Microsoft Sentinel. 
+- **Workspace ID**: `WorkspaceId`
+ > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+- **Primary Key**: `PrimaryKey`
+ > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*
+
 [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/teamcymruscout.md b/Tools/Solutions Analyzer/connector-docs/connectors/teamcymruscout.md index f5eded24f01..0b6c7f1e48b 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/teamcymruscout.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/teamcymruscout.md @@ -10,4 +10,180 @@ The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.
+
+## Permissions
+
+**Resource Provider Permissions:**
+- **Workspace** (Workspace): read and write permissions on the workspace are required.
+
+**Custom Permissions:**
+- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App are required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).
+- **Permission to assign a role to the registered application**: Permission to assign a role to the registered application in Microsoft Entra ID is required.
+- **Team Cymru Scout Credentials/permissions**: Team Cymru Scout account credentials (Username, Password) are required.
+
+## Setup Instructions
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+>**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Steps to Create Team Cymru Scout API Key** + + Follow these instructions to create a Team Cymru Scout API Key. + 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization. + +**STEP 2 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
+ +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 3 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 4 - Get Object ID of your application in Microsoft Entra ID** + + After creating your app registration, follow the steps in this section to get Object ID: + 1. Go to **Microsoft Entra ID**. + 2. Select **Enterprise applications** from the left menu. + 3. Find your newly created application in the list (you can search by the name you provided). + 4. Click on the application. + 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment. + +**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID** + + Follow the steps in this section to assign the role: + 1. 
In the Azure portal, Go to **Resource Group** and select your resource group.
+ 2. Go to **Access control (IAM)** from left panel.
+ 3. Click on **Add**, and then select **Add role assignment**.
+ 4. Select **Contributor** as role and click on next.
+ 5. In **Assign access to**, select `User, group, or service principal`.
+ 6. Click on **add members** and type **your app name** that you have created and select it.
+ 7. Now click on **Review + assign** and then again click on **Review + assign**.
+
+> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)
+
+**STEP 6 - Upload csv with indicators in Watchlist**
+
+ Follow the steps in this section to upload csv containing indicators in watchlist:
+ 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.
+ 2. Go to **Watchlist** under **Configuration** section from left panel.
+ 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.
+ 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.
+ 5. Once validation is successful, click on **Update**.
+ 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for IP indicators.
+
+> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)
+
+**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**
+
+>
+
+**8. Option 1 - Azure Resource Manager (ARM) Template**
+
+Use this method for automated deployment of the TeamCymruScout data connector.
+
+1. Click the **Deploy to Azure** button below.
+
+ [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)
+2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Location + WorkspaceName + Function Name + TeamCymruScoutBaseURL + AuthenticationType + Username + Password + APIKey + IPValues + DomainValues + APIType + AzureClientId + AzureClientSecret + TenantId + AzureEntraObjectId + IPTableName + DomainTableName + AccountUsageTableName + Schedule + AccountUsageSchedule + LogLevel +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**9. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX). + + e. **Select a runtime:** Choose Python 3.12 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective values (case-sensitive): + CymruScoutBaseURL + AuthenticationType + TeamCymruScoutUsername + TeamCymruScoutPassword + APIKey + IPValues + DomainValues + APIType + AZURE_CLIENT_ID + AZURE_CLIENT_SECRET + AZURE_TENANT_ID + IPTableName + DomainTableName + AccountUsageTableName + Schedule + AccountUsageSchedule + LogLevel + AZURE_DATA_COLLECTION_ENDPOINT + AZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES + AZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/tenable.ad.md b/Tools/Solutions Analyzer/connector-docs/connectors/tenable.ad.md index 96cc2b3b8f6..bdae56b1aef 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/tenable.ad.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/tenable.ad.md @@ -12,4 +12,94 @@ Tenable.ad connector allows to export Tenable.ad Indicators of Exposures, trailf It provides a data parser to manipulate the logs more easily. 
The different workbooks ease your Active Directory monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Access to Tenable.ad Configuration**: Permissions to configure syslog alerting engine + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://raw.githubusercontent.com/tenable/Azure-Sentinel/Tenable.ad-connector/Solutions/TenableAD/Parsers/afad_parser.kql) to create the Kusto Functions alias, **afad_parser** + +**1. Configure the Syslog server** + +You will first need a **linux Syslog** server that Tenable.ad will send logs to. Typically you can run **rsyslog** on **Ubuntu**. + You can then configure this server as you wish, but it is recommended to be able to output Tenable.ad logs in a separate file. 
+ +Configure rsyslog to accept logs from your Tenable.ad IP address.: + +```shell +sudo -i + +# Set Tenable.ad source IP address +export TENABLE_AD_IP={Enter your IP address} + +# Create rsyslog configuration file +cat > /etc/rsyslog.d/80-tenable.conf << EOF +\$ModLoad imudp +\$UDPServerRun 514 +\$ModLoad imtcp +\$InputTCPServerRun 514 +\$AllowedSender TCP, 127.0.0.1, $TENABLE_AD_IP +\$AllowedSender UDP, 127.0.0.1, $TENABLE_AD_IP +\$template MsgTemplate,"%TIMESTAMP:::date-rfc3339% %HOSTNAME% %programname%[%procid%]:%msg%\n" +\$template remote-incoming-logs, "/var/log/%PROGRAMNAME%.log" +*.* ?remote-incoming-logs;MsgTemplate +EOF + +# Restart rsyslog +systemctl restart rsyslog +``` + +**2. Install and onboard the Microsoft agent for Linux** + +The OMS agent will receive the Tenable.ad syslog events and publish it in Sentinel : +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**3. Check agent logs on the Syslog server** + +```shell +tail -f /var/opt/microsoft/omsagent/log/omsagent.log +``` + +**4. Configure Tenable.ad to send logs to your Syslog server** + +On your **Tenable.ad** portal, go to *System*, *Configuration* and then *Syslog*. +From there you can create a new Syslog alert toward your Syslog server. + +Once this is done, check that the logs are correctly gathered on your server in a separate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in Tenable.ad). +If you used the Quickstart template, the Syslog server will by default listen on port 514 in UDP and 1514 in TCP, without TLS. + +**5. Configure the custom logs** + +Configure the agent to collect the logs. + +1. 
In Sentinel, go to **Configuration** -> **Settings** -> **Workspace settings** -> **Custom logs**.
+2. Click **Add custom log**.
+3. Upload a sample Tenable.ad.log Syslog file from the **Linux** machine running the **Syslog** server and click **Next**
+4. Set the record delimiter to **New Line** if not already the case and click **Next**.
+5. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**. The default location of the file is `/var/log/Tenable.ad.log` if you have a Tenable version <3.1.0, you must also add this linux file location `/var/log/AlsidForAD.log`.
+6. Set the **Name** to *Tenable_ad_CL* (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *Tenable_ad_CL_CL*).
+7. Click **Next**, you will see a summary, then click **Create**
+
+**6. Enjoy!**
+
+> You should now be able to receive logs in the *Tenable_ad_CL* table, logs data can be parsed using the **afad_parser()** function, used by all query samples, workbooks and analytic templates.
+
 [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/tenableie.md b/Tools/Solutions Analyzer/connector-docs/connectors/tenableie.md index 161332a1988..181241d9e00 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/tenableie.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/tenableie.md @@ -10,4 +10,94 @@ Tenable Identity Exposure connector allows Indicators of Exposure, Indicators of Attack and trailflow logs to be ingested into Microsoft Sentinel.The different work books and data parsers allow you to more easily manipulate logs and monitor your Active Directory environment. The analytic templates allow you to automate responses regarding different events, exposures and attacks.
+
+## Permissions
+
+**Resource Provider Permissions:**
+- **Workspace** (Workspace): read and write permissions are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Access to TenableIE Configuration**: Permissions to configure syslog alerting engine + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>This data connector depends on [afad_parser](https://aka.ms/sentinel-TenableApp-afad-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution. + +**1. Configure the Syslog server** + +You will first need a **linux Syslog** server that TenableIE will send logs to. Typically you can run **rsyslog** on **Ubuntu**. + You can then configure this server as you wish, but it is recommended to be able to output TenableIE logs in a separate file. + +Configure rsyslog to accept logs from your TenableIE IP address.: + +```shell +sudo -i + +# Set TenableIE source IP address +export TENABLE_IE_IP={Enter your IP address} + +# Create rsyslog configuration file +cat > /etc/rsyslog.d/80-tenable.conf << EOF +\$ModLoad imudp +\$UDPServerRun 514 +\$ModLoad imtcp +\$InputTCPServerRun 514 +\$AllowedSender TCP, 127.0.0.1, $TENABLE_IE_IP +\$AllowedSender UDP, 127.0.0.1, $TENABLE_IE_IP +\$template MsgTemplate,"%TIMESTAMP:::date-rfc3339% %HOSTNAME% %programname%[%procid%]:%msg%\n" +\$template remote-incoming-logs, "/var/log/%PROGRAMNAME%.log" +*.* ?remote-incoming-logs;MsgTemplate +EOF + +# Restart rsyslog +systemctl restart rsyslog +``` + +**2. 
Install and onboard the Microsoft agent for Linux** + +The OMS agent will receive the TenableIE syslog events and publish it in Microsoft Sentinel : +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**3. Check agent logs on the Syslog server** + +```shell +tail -f /var/opt/microsoft/omsagent/log/omsagent.log +``` + +**4. Configure TenableIE to send logs to your Syslog server** + +On your **TenableIE** portal, go to *System*, *Configuration* and then *Syslog*. +From there you can create a new Syslog alert toward your Syslog server. + +Once this is done, check that the logs are correctly gathered on your server in a separate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in TenableIE). +If you used the Quickstart template, the Syslog server will by default listen on port 514 in UDP and 1514 in TCP, without TLS. + +**5. Configure the custom logs** + +Configure the agent to collect the logs. + +1. In Microsoft Sentinel, go to **Configuration** -> **Settings** -> **Workspace settings** -> **Custom logs**. +2. Click **Add custom log**. +3. Upload a sample TenableIE.log Syslog file from the **Linux** machine running the **Syslog** server and click **Next** +4. Set the record delimiter to **New Line** if not already the case and click **Next**. +5. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**. The default location of the file is `/var/log/TenableIE.log` if you have a Tenable version <3.1.0, you must also add this linux file location `/var/log/AlsidForAD.log`. +6. 
Set the **Name** to *Tenable_IE_CL* (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *Tenable_IE_CL_CL*). +7. Click **Next**, you will see a summary, then click **Create** + +**6. Enjoy!** + +> You should now be able to receive logs in the *Tenable_IE_CL* table, logs data can be parsed using the **afad_parser()** function, used by all query samples, workbooks and analytic templates. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/tenableioapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/tenableioapi.md index 4d2fd7a972c..e801c938db2 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/tenableioapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/tenableioapi.md @@ -10,4 +10,92 @@ The [Tenable.io](https://www.tenable.com/products/tenable-io) data connector provides the capability to ingest Asset and Vulnerability data into Microsoft Sentinel through the REST API from the Tenable.io platform (Managed in the cloud). Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App are required.
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). Check all [requirements and follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) for obtaining credentials. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Durable Functions to connect to the Tenable.io API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk) and [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a [**Tenable.io parser for vulnerabilities**](https://aka.ms/sentinel-TenableIO-TenableIOVulnerabilities-parser) and a [**Tenable.io parser for assets**](https://aka.ms/sentinel-TenableIO-TenableIOAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution. 
+ +**STEP 1 - Configuration steps for Tenable.io** + + [Follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) to obtain the required API credentials. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App** + +>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Tenable.io Vulnerability Management Report data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableIO-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **TenableAccessKey** and **TenableSecretKey** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Tenable.io Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-TenableIO-functionapp) file. 
Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableIOXXXXX). + + e. **Select a runtime:** Choose Python 3.8. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + TenableAccessKey + TenableSecretKey + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/tenablevm.md b/Tools/Solutions Analyzer/connector-docs/connectors/tenablevm.md index 0a68b9062b2..8497f1dfea6 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/tenablevm.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/tenablevm.md @@ -10,4 +10,191 @@ The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App are required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: Both a **TenableAccessKey** and a **TenableSecretKey** are required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management).
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuration steps for TenableVM** + + [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. 
+ +**STEP 2 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 3 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. 
+ +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 4 - Get Object ID of your application in Microsoft Entra ID** + + After creating your app registration, follow the steps in this section to get Object ID: + 1. Go to **Microsoft Entra ID**. + 2. Select **Enterprise applications** from the left menu. + 3. Find your newly created application in the list (you can search by the name you provided). + 4. Click on the application. + 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment. + +**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App** + +**6. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. +3. Enter the below information : + + a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. + + b. **TenableAccessKey** - Enter Access key for using the Tenable API. + + c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. + + d. **AzureClientID** - Enter Azure Client ID. + + e. **AzureClientSecret** - Enter Azure Client Secret. + + f. **TenantID** - Enter Tenant ID got from above steps. + + g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. + + h. 
**LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. + + i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. + + j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. + + k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. + + l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. + + m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. + + n. **AssetTableName** - Enter name of the table used to store Asset Data logs. + + o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. + + p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. + + q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. + + r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**7. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. 
Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX). + + e. **Select a runtime:** Choose Python 3.12. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + + a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. + + b. **TenableAccessKey** - Enter Access key for using the Tenable API. + + c. 
**TenableSecretKey** - Enter Tenable Secret Key for Authentication. + + d. **AzureClientID** - Enter Azure Client ID. + + e. **AzureClientSecret** - Enter Azure Client Secret. + + f. **TenantID** - Enter Tenant ID got from above steps. + + g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. + + h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. + + i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. + + j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. + + k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. + + l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. + + m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. + + n. **AssetTableName** - Enter name of the table used to store Asset Data logs. + + o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. + + p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. + + q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. + + r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. + + s. **PyTenableUAVendor** - Value must be set to **Microsoft**. + + t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. + + u. **PyTenableUABuild** - Value must be set to **0.0.1**. +3. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/thehiveprojectthehive.md b/Tools/Solutions Analyzer/connector-docs/connectors/thehiveprojectthehive.md index 1ae74781d70..195578601fd 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/thehiveprojectthehive.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/thehiveprojectthehive.md @@ -10,4 +10,78 @@ The [TheHive](http://thehive-project.org/) data connector provides the capability to ingest common TheHive events into Microsoft Sentinel through Webhooks. TheHive can notify external system of modification events (case creation, alert update, task assignment) in real time. When a change occurs in the TheHive, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://docs.thehive-project.org/thehive/legacy/thehive3/admin/webhooks/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Webhooks Credentials/permissions**: **TheHiveBearerToken**, **Callback URL** are required for working Webhooks. 
See the documentation to learn more about [configuring Webhooks](https://docs.thehive-project.org/thehive/installation-and-configuration/configuration/webhooks/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**TheHive**](https://aka.ms/sentinel-TheHive-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuration steps for the TheHive** + + Follow the [instructions](https://docs.thehive-project.org/thehive/installation-and-configuration/configuration/webhooks/) to configure Webhooks. + +1. Authentication method is *Bearer Auth*. +2. Generate the **TheHiveBearerToken** according to your password policy. +3. Set up Webhook notifications in the *application.conf* file including **TheHiveBearerToken** parameter. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the TheHive data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following).
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the TheHive data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TheHive-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **TheHiveBearerToken** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. +6. After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the TheHive data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-TheHive-functionapp) file. Extract archive to your local development computer. +2. 
Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + TheHiveBearerToken + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +5. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/theom.md b/Tools/Solutions Analyzer/connector-docs/connectors/theom.md index 31efcd11f7a..8869eadf262 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/theom.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/theom.md @@ -10,4 +10,25 @@ Theom Data Connector enables organizations to connect their Theom environment to Microsoft Sentinel. This solution enables users to receive alerts on data security risks, create and enrich incidents, check statistics and trigger SOAR playbooks in Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required.
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +1. In **Theom UI Console** click on **Manage -> Alerts** on the side bar. +2. Select **Sentinel** tab. +3. Click on **Active** button to enable the configuration. +4. Enter `Primary` key as `Authorization Token` +5. Enter `Endpoint URL` as `https://.ods.opinsights.azure.com/api/logs?api-version=2016-04-01` +6. Click on `SAVE SETTINGS` +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligence.md b/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligence.md index 95aeebe3964..bdc915dfdab 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligence.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligence.md @@ -10,4 +10,45 @@ Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Tenant Permissions:** +Requires GlobalAdmin, SecurityAdmin on the workspace's tenant + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. You can connect your threat intelligence data sources to Microsoft Sentinel by either:** + +- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, and others. + +- Calling the Microsoft Graph Security API directly from another application. + +**2. Follow These Steps to Connect your Threat Intelligence:** + +1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory. + +2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application. + +3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application. + +4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following: + + a. The application ID and secret you received when registering the app (step 1 above). + + b. Set “Microsoft Sentinel” as the target. + + c. 
Set an action for each indicator - ‘alert’ is most relevant for Microsoft Sentinel use cases + +For the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector). + +Click on "Connect" below + +> Data from all regions will be sent to and stored in the workspace's region. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `ThreatIntelligence`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligencetaxii.md b/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligencetaxii.md index 7180a8fc699..605cd2ed4ef 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligencetaxii.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligencetaxii.md @@ -10,4 +10,23 @@ Microsoft Sentinel integrates with TAXII 2.0 and 2.1 data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send the supported STIX object types from TAXII servers to Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes. For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2224105&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **TAXII Server**: TAXII 2.0 or TAXII 2.1 Server URI and Collection ID. 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure TAXII servers to stream STIX 2.0 or 2.1 STIX objects to Microsoft Sentinel** + +You can connect your TAXII servers to Microsoft Sentinel using the built-in TAXII connector. For detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence---taxii-data-connector). + +Enter the following information and select Add to configure your TAXII server. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `ThreatIntelligenceTaxii`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligencetaxiiexport.md b/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligencetaxiiexport.md index 3fc0a7d0877..70d7b652952 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligencetaxiiexport.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligencetaxiiexport.md @@ -10,4 +10,19 @@ Microsoft Sentinel integrates with TAXII 2.1 servers to enable exporting of your threat intelligence objects. Use this connector to send the supported STIX object types from Microsoft Sentinel to TAXII servers. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **TAXII Server**: TAXII 2.1 Server URL and Collection ID. 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure TAXII servers to export STIX 2.1 objects to. Once configured, you can start exporting STIX objects from your TI repository** +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `ThreatIntelligenceTaxii`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligenceuploadindicatorsapi.md b/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligenceuploadindicatorsapi.md index 8bf674c6ab7..32b5f3c9716 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligenceuploadindicatorsapi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/threatintelligenceuploadindicatorsapi.md @@ -6,8 +6,53 @@ | **Publisher** | Microsoft | | **Tables Ingested** | [`ThreatIntelIndicators`](../tables-index.md#threatintelindicators), [`ThreatIntelObjects`](../tables-index.md#threatintelobjects), [`ThreatIntelligenceIndicator`](../tables-index.md#threatintelligenceindicator) | | **Used in Solutions** | [Threat Intelligence](../solutions/threat-intelligence.md), [Threat Intelligence (NEW)](../solutions/threat-intelligence-(new).md) | -| **Connector Definition Files** | [template_ThreatIntelligenceUploadIndicators.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceUploadIndicators.json), 
[template_ThreatIntelligenceUploadIndicators_ForGov.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json) | +| **Connector Definition Files** | [template_ThreatIntelligenceUploadIndicators_ForGov.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json) | Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permissions are required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. You can connect your threat intelligence data sources to Microsoft Sentinel by either:** + +>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. + +>Calling the Microsoft Sentinel data plane API directly from another application. + - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call. + +**2. Follow These Steps to Connect to your Threat Intelligence:** + +**1. Get Microsoft Entra ID Access Token** + +To send request to the APIs, you need to acquire Microsoft Entra ID access token. 
You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token + - Notice: Please request Microsoft Entra ID access token with scope value: +Fairfax: https://management.usgovcloudapi.net/.default +Mooncake: https://management.chinacloudapi.cn/.default + +**2. Send STIX objects to Sentinel** + +You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). + +>HTTP method: POST + +>Endpoint: +Fairfax: https://api.ti.sentinel.azure.us/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview +Mooncake: https://api.ti.sentinel.azure.cn/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview + +>WorkspaceID: the workspace that the STIX objects are uploaded to. + + +>Header Value 1: "Authorization" = "Bearer [Microsoft Entra ID Access Token from step 1]" + + +> Header Value 2: "Content-Type" = "application/json" + +>Body: The body is a JSON object containing an array of STIX objects. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/transmitsecurity.md b/Tools/Solutions Analyzer/connector-docs/connectors/transmitsecurity.md index 5122a66cfcd..08a7d6d1c06 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/transmitsecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/transmitsecurity.md @@ -10,4 +10,115 @@ The [Transmit Security] data connector provides the capability to ingest common Transmit Security API events into Microsoft Sentinel through the REST API. [Refer to API documentation for more information](https://developer.transmitsecurity.com/). 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Client ID**: **TransmitSecurityClientID** is required. See the documentation to learn more about API on the `https://developer.transmitsecurity.com/`. +- **REST API Client Secret**: **TransmitSecurityClientSecret** is required. See the documentation to learn more about API on the `https://developer.transmitsecurity.com/`. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Transmit Security API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Transmit Security API** + +Follow the instructions to obtain the credentials. + +1. Log in to the Transmit Security Portal. +2. Configure a [management app](https://developer.transmitsecurity.com/guides/user/management_apps/). Give the app a suitable name, for example, MyAzureSentinelCollector. +3. Save credentials of the new user for using in the data connector. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Transmit Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Transmit Security data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TransmitSecurityAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TransmitSecurityAPI-azuredeploy-gov) + +2. Select the preferred **Subscription**, **Resource Group**, and **Location**. + +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select an existing resource group without Windows apps in it or create a new resource group. + +3. 
Enter the **TransmitSecurityClientID**, **TransmitSecurityClientSecret**, **TransmitSecurityPullEndpoint**, **TransmitSecurityTokenEndpoint**, and deploy. + +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. + +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Transmit Security data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS Code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-TransmitSecurityAPI-functionapp) file. Extract the archive to your local development computer. + +2. Start VS Code. Choose **File** in the main menu and select **Open Folder**. + +3. Select the top-level folder from the extracted files. + +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. + + If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**. + + If you're already signed in, go to the next step. + +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option). + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. 
For better performance and lower costs, choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. + +7. Go to the Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. + +2. Select **Environment variables**. + +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + + - **TransmitSecurityClientID** + - **TransmitSecurityClientSecret** + - **TransmitSecurityPullEndpoint** + - **TransmitSecurityTokenEndpoint** + - **WorkspaceID** + - **WorkspaceKey** + - **logAnalyticsUri** (optional) + + > - Use **logAnalyticsUri** to override the log analytics API endpoint for a dedicated cloud. For example, for the public cloud, leave the value empty; for the Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. + +4. Once all application settings have been entered, click **Apply**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/trendmicro.md b/Tools/Solutions Analyzer/connector-docs/connectors/trendmicro.md index ec4860c5f07..3f459722f02 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/trendmicro.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/trendmicro.md @@ -10,4 +10,61 @@ The Trend Micro Deep Security connector allows you to easily connect your Deep Security logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's networks/systems and improves your security operation capabilities. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Trend Micro Deep Security logs to Syslog agent** + +1. 
Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address. +2. Forward Trend Micro Deep Security events to the Syslog agent. +3. Define a new Syslog Configuration that uses the CEF format by referencing [this knowledge article](https://aka.ms/Sentinel-trendmicro-kblink) for additional information. +4. Configure the Deep Security Manager to use this new configuration to forward events to the Syslog agent using [these instructions](https://aka.ms/Sentinel-trendMicro-connectorInstructions). +5. Make sure to save the [TrendMicroDeepSecurity](https://aka.ms/TrendMicroDeepSecurityFunction) function so that it queries the Trend Micro Deep Security data properly. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/trendmicroapexone.md b/Tools/Solutions Analyzer/connector-docs/connectors/trendmicroapexone.md index 46fa72e195f..bb67b573287 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/trendmicroapexone.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/trendmicroapexone.md @@ -10,4 +10,61 @@ The [Trend Micro Apex One](https://www.trendmicro.com/en_us/business/products/user-protection/sps/endpoint.html) data connector provides the capability to ingest [Trend Micro Apex One events](https://aka.ms/sentinel-TrendMicroApex-OneEvents) into Microsoft Sentinel. Refer to [Trend Micro Apex Central](https://aka.ms/sentinel-TrendMicroApex-OneCentral) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>This data connector depends on a parser based on a Kusto Function to work as expected [**TMApexOneEvent**](https://aka.ms/sentinel-TMApexOneEvent-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using Trend Micro Apex Central 2019 + +**1. 
Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +[Follow these steps](https://docs.trendmicro.com/en-us/enterprise/trend-micro-apex-central-2019-online-help/detections/logs_001/syslog-forwarding.aspx) to configure Apex Central sending alerts via syslog. While configuring, on step 6, select the log format **CEF**. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. 
Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/trendmicroapexoneama.md b/Tools/Solutions Analyzer/connector-docs/connectors/trendmicroapexoneama.md index 2ca9455ee4f..6cb672129eb 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/trendmicroapexoneama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/trendmicroapexoneama.md @@ -10,4 +10,59 @@ The [Trend Micro Apex One](https://www.trendmicro.com/en_us/business/products/user-protection/sps/endpoint.html) data connector provides the capability to ingest [Trend Micro Apex One events](https://aka.ms/sentinel-TrendMicroApex-OneEvents) into Microsoft Sentinel. Refer to [Trend Micro Apex Central](https://aka.ms/sentinel-TrendMicroApex-OneCentral) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +>This data connector depends on a parser based on a Kusto Function to work as expected [**TMApexOneEvent**](https://aka.ms/sentinel-TMApexOneEvent-parser) which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + [Follow these steps](https://docs.trendmicro.com/en-us/enterprise/trend-micro-apex-central-2019-online-help/detections/logs_001/syslog-forwarding.aspx) to configure Apex Central sending alerts via syslog. While configuring, on step 6, select the log format **CEF**. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2.
You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/trendmicrocas.md b/Tools/Solutions Analyzer/connector-docs/connectors/trendmicrocas.md index 5728e76adb3..29cedbc5f7d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/trendmicrocas.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/trendmicrocas.md @@ -10,4 +10,96 @@ The [Trend Micro Cloud App Security](https://www.trendmicro.com/en_be/business/products/user-protection/sps/email-and-collaboration/cloud-app-security.html) data connector provides the capability to retrieve security event logs of the services that Cloud App Security protects and more events into Microsoft Sentinel through the Log Retrieval API. Refer to API [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/supported-cloud-app-/log-retrieval-api/get-security-logs.aspx) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **TrendMicroCASToken** and **TrendMicroCASServiceURL** are required for making API calls. See the [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/getting-started-with/using-cloud-app-secu.aspx) to learn more about API. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. 
+ +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**TrendMicroCAS**](https://aka.ms/sentinel-TrendMicroCAS-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuration steps for the Trend Micro Log Retrieval API** + + Follow the instructions to obtain the credentials. + +1. Obtain the **TrendMicroCASToken** using the [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/getting-started-with/generating-an-authen.aspx). +2. Save credentials for using in the data connector. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Trend Micro Cloud App Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Trend Micro Cloud App Security data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TrendMicroCAS-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **TrendMicroCASToken**, **TrendMicroCASServiceURL** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. 
Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Trend Micro Cloud App Security data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-TMCASAPI-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TMCASXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. 
Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + TrendMicroCASToken + TrendMicroCASServiceURL + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/trendmicrotippingpoint.md b/Tools/Solutions Analyzer/connector-docs/connectors/trendmicrotippingpoint.md index a50a323d2b7..2218c8833c0 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/trendmicrotippingpoint.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/trendmicrotippingpoint.md @@ -10,4 +10,59 @@ The Trend Micro TippingPoint connector allows you to easily connect your TippingPoint SMS IPS events with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's networks/systems and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias TrendMicroTippingPoint and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20TippingPoint/Parsers/TrendMicroTippingPoint).The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. 
 + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Trend Micro TippingPoint SMS logs to Syslog agent** + +Set your TippingPoint SMS to send Syslog messages in ArcSight CEF Format v4.2 format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/trendmicroxdr.md b/Tools/Solutions Analyzer/connector-docs/connectors/trendmicroxdr.md index 17358adc51f..68a9c71dc91 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/trendmicroxdr.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/trendmicroxdr.md @@ -14,4 +14,48 @@ The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detect The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Trend Vision One API Token**: A Trend Vision One API Token is required. 
See the documentation to learn more about the [Trend Vision One API](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Trend Vision One API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Trend Vision One API** + + [Follow these instructions](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps) to create an account and an API authentication token. + +**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Trend Vision One connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Trend Vision One API Authorization Token, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. 
Azure Resource Manager (ARM) Template Deployment** + +This method provides an automated deployment of the Trend Vision One connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-trendmicroxdr-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter a unique **Function Name**, **Workspace ID**, **Workspace Key**, **API Token** and **Region Code**. + - Note: Provide the appropriate region code based on where your Trend Vision One instance is deployed: us, eu, au, in, sg, jp + - Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/ubiquitiunifi.md b/Tools/Solutions Analyzer/connector-docs/connectors/ubiquitiunifi.md index 6dd4d5300f8..0a189772e7f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/ubiquitiunifi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/ubiquitiunifi.md @@ -10,4 +10,66 @@ The [Ubiquiti UniFi](https://www.ui.com/) data connector provides the capability to ingest [Ubiquiti UniFi firewall, dns, ssh, AP events](https://help.ui.com/hc/en-us/articles/204959834-UniFi-How-to-View-Log-Files) into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**UbiquitiAuditEvent**](https://aka.ms/sentinel-UbiquitiUnifi-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using Enterprise System Controller Release Version: 5.6.2 (Syslog) + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server to which the Ubiquiti logs are forwarder from Ubiquiti device (e.g.remote syslog server) + +> Logs from Ubiquiti Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Follow the configuration steps below to get Ubiquiti logs into Microsoft Sentinel. 
Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps. +1. Configure log forwarding on your Ubiquiti controller: + + i. Go to Settings > System Setting > Controller Configuration > Remote Logging and enable the Syslog and Debugging (optional) logs (Refer to [User Guide](https://dl.ui.com/guides/UniFi/UniFi_Controller_V5_UG.pdf) for detailed instructions). +2. Download config file [Ubiquiti.conf](https://aka.ms/sentinel-UbiquitiUnifi-conf). +3. Log in to the server where you have installed Azure Log Analytics agent. +4. Copy Ubiquiti.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. +5. Edit Ubiquiti.conf as follows: + + i. specify port which you have set your Ubiquiti device to forward logs to (line 4) + + ii. replace **workspace_id** with real value of your Workspace ID (lines 14,15,16,19) +6. Save changes and restart the Azure Log Analytics agent for Linux service with the following command: + sudo /opt/microsoft/omsagent/bin/service_control restart +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/valencesecurity.md b/Tools/Solutions Analyzer/connector-docs/connectors/valencesecurity.md index d810cd3f630..938339f9d89 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/valencesecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/valencesecurity.md @@ -10,4 +10,31 @@ Connects the Valence SaaS security platform Azure Log Analytics via the REST API interface. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Step 1 : Read the detailed documentation** + +The installation process is documented in great detail in [Valence Security's knowledge base](https://support.valencesecurity.com). The user should consult this documentation further to understand installation and debug of the integration. + +**2. Step 2: Retrieve the workspace access credentials** + +The first installation step is to retrieve both your **Workspace ID** and **Primary Key** from the Microsoft Sentinel platform. +Copy the values shown below and save them for configuration of the API log forwarder integration. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Step 3: Configure Sentinel integration on the Valence Security Platform** + +As a Valence Security Platform admin, go to the [configuration screen](https://app.valencesecurity.com/settings/configuration), click Connect in the SIEM Integration card, and choose Microsoft Sentinel. Paste the values from the previous step and click Connect. Valence will test the connection so when success is reported, the connection worked. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/varmourac.md b/Tools/Solutions Analyzer/connector-docs/connectors/varmourac.md index f643888cc2c..d6a1e3f868f 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/varmourac.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/varmourac.md @@ -10,4 +10,63 @@ vArmour reduces operational risk and increases cyber resiliency by visualizing and controlling application relationships across the enterprise. This vArmour connector enables streaming of Application Controller Violation Alerts into Microsoft Sentinel, so you can take advantage of search & correlation, alerting, & threat intelligence enrichment for each log. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. 
+ + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Configure the vArmour Application Controller to forward Common Event Format (CEF) logs to the Syslog agent** + +Send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. +**2.1 Download the vArmour Application Controller user guide** + + Download the user guide from https://support.varmour.com/hc/en-us/articles/360057444831-vArmour-Application-Controller-6-0-User-Guide. + + **2.2 Configure the Application Controller to Send Policy Violations** + + In the user guide - refer to "Configuring Syslog for Monitoring and Violations" and follow steps 1 to 3. +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. 
You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/varmouracama.md b/Tools/Solutions Analyzer/connector-docs/connectors/varmouracama.md index e7f4bd28a0c..dac26cde6a6 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/varmouracama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/varmouracama.md @@ -10,4 +10,67 @@ vArmour reduces operational risk and increases cyber resiliency by visualizing and controlling application relationships across the enterprise. This vArmour connector enables streaming of Application Controller Violation Alerts into Microsoft Sentinel, so you can take advantage of search & correlation, alerting, & threat intelligence enrichment for each log. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Configure the vArmour Application Controller to forward Common Event Format (CEF) logs to the Syslog agent** + + Send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. +**1 Download the vArmour Application Controller user guide** + + Download the user guide from https://support.varmour.com/hc/en-us/articles/360057444831-vArmour-Application-Controller-6-0-User-Guide. + + **2 Configure the Application Controller to Send Policy Violations** + + In the user guide - refer to "Configuring Syslog for Monitoring and Violations" and follow steps 1 to 3. + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. 
+ +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/varonispurviewpush.md b/Tools/Solutions Analyzer/connector-docs/connectors/varonispurviewpush.md index 4a148df37ae..606a0aa89f6 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/varonispurviewpush.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/varonispurviewpush.md @@ -10,4 +10,40 @@ The [Varonis Purview](https://www.varonis.com/) connector provides the capability to sync resources from Varonis to Microsoft Purview. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher. +- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). 
Typically requires Azure RBAC Owner or User Access Administrator role + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Run this to set up ingestion for Varonis Resources** + +This will create the necessary Log Analytics tables, Data Collection Rule (DCR), and an Entra application to securely send data to the DCR. +#### Automated Configuration and Secure Data Ingestion with Entra Application +Clicking on "Deploy" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). +It will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token. +Deploy Varonis connector resources + +**2. Push your logs into the workspace** + +Use the following parameters to configure the Varonis Purview Connector in your Varonis integrations dashboard. 
+- **Tenant ID (Directory ID)**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Endpoint Uri**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Rule Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Resources Stream Name**: `Custom-varonisresources` + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/varonissaas.md b/Tools/Solutions Analyzer/connector-docs/connectors/varonissaas.md index ddebf903129..8d8cf7f252e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/varonissaas.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/varonissaas.md @@ -14,4 +14,48 @@ Varonis SaaS provides the capability to ingest [Varonis Alerts](https://www.varo Varonis prioritizes deep data visibility, classification capabilities, and automated remediation for data access. Varonis builds a single prioritized view of risk for your data, so you can proactively and systematically eliminate risk from insider threats and cyberattacks. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to Varonis DatAlert service to pull alerts into Microsoft Sentinel. This might result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +**For Azure function and related services installation use:** + + [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVaronisSaaS%2FData%2520Connectors%2Fazuredeploy.json) + +STEP 1 - Obtain the Varonis DatAlert Endpoint API credentials. + + To generate the Client ID and API key: + 1. Launch the Varonis Web Interface. + 2. Navigate to Configuration -> API Keys. The API Keys page is displayed. + 3. Click Create API Key. The Add New API Key settings are displayed on the right. + 4. Fill in the name and description. + 5. Click the Generate Key button. + 6. Copy the API key secret and save it in a handy location. You won't be able to copy it again. 
+ +For additional information, please check: [Varonis Documentation](https://help.varonis.com/s/document-item?bundleId=ami1661784208197&topicId=emp1703144742927.html&_LANG=enus) + +STEP 2 - Deploy the connector and the associated Azure Function. +- **Workspace Name**: `WorkspaceName` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the Deploy to Azure button. + + [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVaronisSaaS%2FData%2520Connectors%2Fazuredeploy.json) +2. Select the preferred Subscription, Resource Group, Region, Storage Account Type. +3. Enter Log Analytics Workspace Name, Varonis FQDN, Varonis SaaS API Key. +4. Click Review + Create, Create. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/vectrastreamama.md b/Tools/Solutions Analyzer/connector-docs/connectors/vectrastreamama.md index a45d0f3cc24..56dd4e65ee2 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/vectrastreamama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/vectrastreamama.md @@ -10,4 +10,99 @@ The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Vectra AI Stream configuration**: must be configured to export Stream metadata in JSON + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution. + +>**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed! + + In the first part, we are going to create the custom tables required for this solution (using an ARM template). Then we are going to configure the Data Connector. +**Please proceed with these steps:** + +**Step 1. Create custom tables in Log Analytic Workspace (ARM Template)** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json) +2. Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**) +3. Click **Review + Create** to deploy. + + _Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._ + + **Step 2. Install the Syslog via AMA Data connector** + + _Note: This is only required if it has not been installed yet in Microsoft Sentinel._ +1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub. + +2. Search for 'Syslog' (Provider is Microsoft) and select it. + +3. Click the 'Install' button on the bottom of the right panel. + + **Step 3. 
Configure the Syslog via AMA data connector** + + _Note: Two different Data Collection Rules (DCR) are going to be created during this step_ +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector. + +2. Search for 'Syslog via AMA' data connector and open it. + +3. Check if there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE. + +4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE. + +5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. In the Collect tab, select LOG_LOCAL0/LOG_NOTICE + + + + Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication. + +In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables. +**Please proceed with these steps:** + +**Step 1. Modify the syslog-ng configuration** + + _Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._ +1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf). +2. Log into the instance where syslog-ng/AMA is running. +3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded. +4. 
Save and restart syslog-ng (_systemctl restart syslog-ng_). + + **Step 2. Modify the Data Collection rules configuration** + + _Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_ + 1. Locate the 2 DCR that you created in Microsoft Sentinel. + 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template. + 3. Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name. + 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded. + 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3). + 6. Save --> Review + Create --> Create. + 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template). + 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name. + 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded. + 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace. + 11. Save --> Review + Create --> Create. + +**2. 
Configure Vectra AI Stream** + +Configure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA. + +From the Vectra UI, navigate to Settings > Stream and Edit the destination configuration: + + 1. Select Publisher: RAW JSON + 2. Set the server IP or hostname (which is the host where AMA is running) + 3. Set all the ports to **514**. + 4. Save. + +**3. Run the following command to validate (or set up) that syslog-ng is listening on port 514** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/vectraxdr.md b/Tools/Solutions Analyzer/connector-docs/connectors/vectraxdr.md index 2ceed60b5c5..27d9854685c 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/vectraxdr.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/vectraxdr.md @@ -10,4 +10,224 @@ The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App are required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **REST API Credentials/permissions**: **Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**. + +**STEP 1 - Configuration steps for the Vectra API Credentials** + + Follow these instructions to create a Vectra Client ID and Client Secret. + 1. Log into your Vectra portal + 2. Navigate to Manage -> API Clients + 3. 
From the API Clients page, select 'Add API Client' to create a new client. + 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. + 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints. + +**STEP 2 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 3 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. 
Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 4 - Get Object ID of your application in Microsoft Entra ID** + + After creating your app registration, follow the steps in this section to get Object ID: + 1. Go to **Microsoft Entra ID**. + 2. Select **Enterprise applications** from the left menu. + 3. Find your newly created application in the list (you can search by the name you provided). + 4. Click on the application. + 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment. + +**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID** + + Follow the steps in this section to assign the role: + 1. In the Azure portal, Go to **Resource Group** and select your resource group. + 2. Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. Select **Contributor** as role and click on next. + 5. In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 6 - Create a Keyvault** + + Follow these instructions to create a new Keyvault. + 1. 
In the Azure portal, Go to **Key vaults** and click on Create. + 2. Select Subscription, Resource Group and provide a unique name of keyvault. + +**STEP 7 - Create Access Policy in Keyvault** + + Follow these instructions to create access policy in Keyvault. + 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create. + 2. Select all keys & secrets permissions. Click next. + 3. In the principal section, search by application name which was generated in STEP - 2. Click next. + + **Note:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'** + +**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available. + +**9. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Vectra connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. 
Enter the below information : + Function Name + Workspace Name + Vectra Base URL (https://) + Vectra Client Id - Health + Vectra Client Secret Key - Health + Vectra Client Id - Entity Scoring + Vectra Client Secret - Entity Scoring + Vectra Client Id - Detections + Vectra Client Secret - Detections + Vectra Client Id - Audits + Vectra Client Secret - Audits + Vectra Client Id - Lockdown + Vectra Client Secret - Lockdown + Vectra Client Id - Host-Entity + Vectra Client Secret - Host-Entity + Vectra Client Id - Account-Entity + Vectra Client Secret - Account-Entity + Key Vault Name + Azure Client Id + Azure Client Secret + Tenant Id + Azure Entra ObjectID + StartTime (in MM/DD/YYYY HH:MM:SS Format) + Include Score Decrease + Audits Table Name + Detections Table Name + Entity Scoring Table Name + Lockdown Table Name + Health Table Name + Entities Table Name + Exclude Group Details From Detections + Log Level (Default: INFO) + Lockdown Schedule + Health Schedule + Detections Schedule + Audits Schedule + Entity Scoring Schedule + Entities Schedule +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**10. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX). + + e. **Select a runtime:** Choose Python 3.8 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. 
Add each of the following application settings individually, with their respective values (case-sensitive): + Workspace ID + Workspace Key + Vectra Base URL (https://) + Vectra Client Id - Health + Vectra Client Secret Key - Health + Vectra Client Id - Entity Scoring + Vectra Client Secret - Entity Scoring + Vectra Client Id - Detections + Vectra Client Secret - Detections + Vectra Client Id - Audits + Vectra Client Secret - Audits + Vectra Client Id - Lockdown + Vectra Client Secret - Lockdown + Vectra Client Id - Host-Entity + Vectra Client Secret - Host-Entity + Vectra Client Id - Account-Entity + Vectra Client Secret - Account-Entity + Key Vault Name + Azure Client Id + Azure Client Secret + Tenant Id + StartTime (in MM/DD/YYYY HH:MM:SS Format) + Include Score Decrease + Audits Table Name + Detections Table Name + Entity Scoring Table Name + Lockdown Table Name + Health Table Name + Entities Table Name + Log Level (Default: INFO) + Lockdown Schedule + Health Schedule + Detections Schedule + Audits Schedule + Entity Scoring Schedule + Entities Schedule + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/veeamcustomtablesdataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/veeamcustomtablesdataconnector.md index 3e58a5e3436..350e1c1ec34 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/veeamcustomtablesdataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/veeamcustomtablesdataconnector.md @@ -30,4 +30,37 @@ The connector supports integration with Veeam Backup & Replication, Veeam ONE an - **VeeamSessions_CL**: Veeam sessions +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Veeam Infrastructure Access**: Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. 
+ +**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions** + +>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following). +- **Workspace Name**: `WorkspaceName` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Veeam data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Microsoft Sentinel Workspace Name**. +4. Click **Review + Create**, **Create**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/virtualmetricdirectorproxy.md b/Tools/Solutions Analyzer/connector-docs/connectors/virtualmetricdirectorproxy.md index 7af60a8214f..f4073e0f452 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/virtualmetricdirectorproxy.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/virtualmetricdirectorproxy.md @@ -10,4 +10,129 @@ VirtualMetric Director Proxy deploys an Azure Function App to securely bridge VirtualMetric DataStream with Azure services including Microsoft Sentinel, Azure Data Explorer, and Azure Storage. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
+ +**Custom Permissions:** +- **Azure Function App**: An Azure Function App must be deployed to host the Director Proxy. Requires read, write, and delete permissions on Microsoft.Web/sites resources within your resource group to create and manage the Function App. +- **VirtualMetric DataStream Configuration**: You need VirtualMetric DataStream configured with authentication credentials to connect to the Director Proxy. The Director Proxy acts as a secure bridge between VirtualMetric DataStream and Azure services. +- **Target Azure Services**: Configure your target Azure services such as Microsoft Sentinel Data Collection Endpoints, Azure Data Explorer clusters, or Azure Storage accounts where the Director Proxy will forward data. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Deploy VirtualMetric Director Proxy** + +Deploy the Azure Function App that serves as a secure proxy between VirtualMetric DataStream and Microsoft Sentinel. +**Prerequisites and Deployment Order** + + **Recommended Deployment Order:** + +For optimal configuration, consider deploying the target connectors first: + +1. **Deploy Microsoft Sentinel Connector**: Deploy the VirtualMetric DataStream for Microsoft Sentinel connector first to create the required Data Collection Endpoints and Rules. + +2. **Deploy Microsoft Sentinel data lake Connector** (optional): If using Microsoft Sentinel data lake tables, deploy the VirtualMetric DataStream for Microsoft Sentinel data lake connector. + +3. **Deploy Director Proxy** (this step): The Director Proxy can then be configured with your Microsoft Sentinel targets. + +**Note:** This order is recommended but not required. You can deploy the Director Proxy independently and configure it with your targets later. 
+ + **Deploy Azure Function App** + + Deploy the VirtualMetric Director Proxy Azure Function App using the Deploy to Azure button. + +1. **Deploy to Azure**: + - Click the Deploy to Azure button below to deploy the Function App: + - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVirtualMetric%2520DataStream%2FData%2520Connectors%2FVirtualMetric-DirectorProxy%2FDeployToAzure.json) + +2. **Configure Deployment Parameters**: + - **Subscription**: Select your Azure subscription + - **Resource Group**: Choose the same resource group as your Microsoft Sentinel workspace or create a new one + - **Region**: Select the Azure region (should match your Microsoft Sentinel workspace region) + - **Function App Name**: Provide a unique name for the Function App (e.g., "vmetric-director-proxy") + +3. **Complete Deployment**: + - Click **Review + create** to validate the parameters + - Click **Create** to deploy the Function App + - Wait for deployment to complete (typically 3-5 minutes) + - Note the Function App URL: `https://.azurewebsites.net` + + **Configure Function App Permissions** + + Assign the necessary permissions to the Function App's managed identity to access Microsoft Sentinel resources. + +1. **Enable System-Assigned Managed Identity**: + - Navigate to your deployed Function App in Azure Portal + - Go to **Identity** under Settings + - Toggle **Status** to **On** for System assigned identity + - Click **Save** and confirm + +2. **Navigate to Resource Group**: + - Go to the resource group containing your Microsoft Sentinel workspace and Data Collection Endpoints + +3. 
**Assign Required Roles**: + - Open **Access control (IAM)** + - Click **+ Add** > **Add role assignment** + - Assign the following roles to the Function App's system-assigned managed identity: + - **Monitoring Metrics Publisher**: For sending data to Data Collection Endpoints + - **Monitoring Reader**: For reading Data Collection Rules configuration + +4. **Select the Function App Identity**: + - In **Members** tab, select **Managed identity** + - Choose **Function App** and select your deployed Director Proxy Function App + - Complete the role assignment + +5. **Get Function App Access Token** (Optional for Function Key authentication): + - Navigate to your Function App + - Go to **App keys** under Functions + - Copy the default host key or create a new function key for authentication + + **Configure VirtualMetric DataStream Integration** + + Set up VirtualMetric DataStream to send security telemetry to Microsoft Sentinel through the Director Proxy. + +1. **Access VirtualMetric DataStream Configuration**: + - Log into your **VirtualMetric DataStream** management console + - Navigate to **Targets** section + - Click **Microsoft Sentinel Targets** + - Click **Add new target** or edit an existing Microsoft Sentinel target + +2. **Configure General Settings**: + - **Name**: Enter a name for your target (e.g., "sentinel-with-proxy") + - **Description**: Optionally provide a description for the target configuration + +3. **Configure Azure Authentication**: + + **For Service Principal Authentication:** + - **Managed Identity for Azure**: Keep **Disabled** + - **Tenant ID**: Enter your Azure Active Directory tenant ID + - **Client ID**: Enter your service principal application ID + - **Client Secret**: Enter your service principal client secret + + **For Azure Managed Identity:** + - **Managed Identity for Azure**: Set to **Enabled** + +4. 
**Configure Director Proxy** (in Azure Properties tab): + - **Endpoint Address**: Enter the Function App URL from Step 2 (format: `https://.azurewebsites.net`) + - **Access Token**: Enter the Function App host key from Step 3 (optional if using Managed Identity) + +5. **Configure Stream Properties**: + - **Endpoint**: Enter the DCE Logs Ingestion URI (format: `https://..ingest.monitor.azure.com`) + - **Streams**: Select **Auto** for automatic stream detection, or configure specific streams if needed + +6. **Verify Data Ingestion in Microsoft Sentinel**: + - Return to your **Log Analytics Workspace** + - Run sample queries to confirm data is being received: + ```kql + CommonSecurityLog + | where TimeGenerated > ago(1h) + | take 10 + ``` + - Check the **Microsoft Sentinel Overview** dashboard for new data sources and event counts + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/virtualmetricmssentinelconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/virtualmetricmssentinelconnector.md index aaa469aa78a..734af70d527 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/virtualmetricmssentinelconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/virtualmetricmssentinelconnector.md @@ -10,4 +10,175 @@ VirtualMetric DataStream connector deploys Data Collection Rules to ingest security telemetry into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. + +**Custom Permissions:** +- **App Registration or Azure Managed Identity**: VirtualMetric DataStream requires an Entra ID identity to authenticate and send logs to Microsoft Sentinel. 
You can choose between creating an App Registration with Client ID and Client Secret, or using Azure Managed Identity for enhanced security without credential management. +- **Resource Group Role Assignment**: The chosen identity (App Registration or Managed Identity) must be assigned to the resource group containing the Data Collection Endpoint with the following roles: Monitoring Metrics Publisher (for log ingestion) and Monitoring Reader (for reading stream configuration). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure VirtualMetric DataStream for Microsoft Sentinel** + +Configure the VirtualMetric DataStream for Microsoft Sentinel to send data. +**Register Application in Microsoft Entra ID (Optional)** + + **Choose your authentication method:** + +**Option A: Use Azure Managed Identity (Recommended)** +- Skip this step if you plan to use Azure Managed Identity for authentication. +- Azure Managed Identity provides a more secure authentication method without managing credentials. + +**Option B: Register a Service Principal Application** + +1. **Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**: + - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab. + - Ensure you are logged in with an account that has **Application Administrator** or **Global Administrator** permissions. + +2. **Create a New Application**: + - In the **Microsoft Entra ID portal**, select **App registrations** from the left-hand navigation. + - Click on **+ New registration**. + - Fill out the following fields: + - **Name**: Enter a descriptive name for the app (e.g., "VirtualMetric ASIM Connector"). + - **Supported account types**: Choose **Accounts in this organizational directory only** (Single tenant). 
+ - **Redirect URI**: Leave this blank. + - Click **Register** to create the application. + +3. **Copy Application and Tenant IDs**: + - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You'll need these for VirtualMetric DataStream configuration. + +4. **Create a Client Secret**: + - In the **Certificates & secrets** section, click **+ New client secret**. + - Add a description (e.g., 'VirtualMetric ASIM Secret') and set an appropriate expiration period. + - Click **Add**. + - **Copy the client secret value immediately**, as it will not be shown again. Store this securely for VirtualMetric DataStream configuration. + + **Assign Required Permissions** + + Assign the required roles to your chosen authentication method (Service Principal or Managed Identity) in the resource group. + +**For Service Principal (if you completed Step 1):** + +1. **Navigate to Your Resource Group**: + - Open the **Azure Portal** and navigate to the **Resource Group** that contains your **Log Analytics Workspace** and where **Data Collection Rules (DCRs)** will be deployed. + +2. **Assign the Monitoring Metrics Publisher Role**: + - In the **Resource Group**, click on **Access control (IAM)** from the left-hand menu. + - Click **+ Add** and select **Add role assignment**. + - In the **Role** tab, search for and select **Monitoring Metrics Publisher**. + - Click **Next** to go to the **Members** tab. + - Under **Assign access to**, select **User, group, or service principal**. + - Click **+ Select members** and search for your registered application by name or client ID. + - Select your application and click **Select**. + - Click **Review + assign** twice to complete the assignment. + +3. **Assign the Monitoring Reader Role**: + - Repeat the same process to assign the **Monitoring Reader** role: + - Click **+ Add** and select **Add role assignment**. 
+ - In the **Role** tab, search for and select **Monitoring Reader**. + - Follow the same member selection process as above. + - Click **Review + assign** twice to complete the assignment. + +**For Azure Managed Identity:** + +1. **Create or Identify Your Managed Identity**: + - If using **System-assigned Managed Identity**: Enable it on your Azure resource (VM, App Service, etc.). + - If using **User-assigned Managed Identity**: Create one in your resource group if it doesn't exist. + +2. **Assign the Monitoring Metrics Publisher Role**: + - Follow the same steps as above, but in the **Members** tab: + - Under **Assign access to**, select **Managed identity**. + - Click **+ Select members** and choose the appropriate managed identity type and select your identity. + - Click **Select**, then **Review + assign** twice to complete. + +3. **Assign the Monitoring Reader Role**: + - Repeat the process to assign the **Monitoring Reader** role to the same managed identity. + +**Required Permission Summary:** +The assigned roles provide the following capabilities: +- **Monitoring Metrics Publisher**: Write data to Data Collection Endpoints (DCE) and send telemetry through Data Collection Rules (DCR) +- **Monitoring Reader**: Read stream configuration and access Log Analytics workspace for ASIM table ingestion + + **Deploy Azure Infrastructure** + + Deploy the required Data Collection Endpoint (DCE) and Data Collection Rules (DCR) for Microsoft Sentinel tables using our ARM template. + +1. 
**Deploy to Azure**: + - Click the Deploy to Azure button below to automatically deploy the required infrastructure: + - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVirtualMetric%2520DataStream%2FData%2520Connectors%2FVirtualMetric-Sentinel%2FDeployToAzure.json) + - This will take you directly to the Azure portal to start the deployment. + +2. **Configure Deployment Parameters**: + - On the custom deployment page, configure the following settings: + + **Project details:** + - **Subscription**: Select your Azure subscription from the dropdown + - **Resource group**: Select an existing resource group or click **Create new** to create a new one + + **Instance details:** + - **Region**: Select the Azure region where your Log Analytics workspace is located (e.g., West Europe) + - **Workspace**: Enter your Log Analytics workspace name + - **DCE Name**: Provide a name for the Data Collection Endpoint (e.g., "vmetric-dce") + - **DCR Name Prefix**: Provide a prefix for the Data Collection Rules (e.g., "vmetric-dcr") + +3. **Complete the Deployment**: + - Click **Review + create** to validate the template. + - Review the parameters and click **Create** to deploy the resources. + - Wait for the deployment to complete (typically takes 2-5 minutes). + +4. 
**Verify Deployed Resources**: + - After deployment, verify the following resources were created: + - **Data Collection Endpoint (DCE)**: Check **Azure Portal > Monitor > Data Collection Endpoints** + - **Data Collection Rules (DCRs)**: Check **Azure Portal > Monitor > Data Collection Rules** + - **Copy the DCE Logs Ingestion URI** from the DCE **Overview** page (format: `https://..ingest.monitor.azure.com`) + - **Copy the DCE Resource ID** from the DCE **Overview** page (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`) + - For each DCR, note the **Immutable ID** from the **Overview** page - you'll need these for VirtualMetric DataStream configuration. + + **Configure VirtualMetric DataStream Integration** + + Set up VirtualMetric DataStream to send security telemetry to Microsoft Sentinel tables. + +1. **Access VirtualMetric DataStream Configuration**: + - Log into your **VirtualMetric DataStream** management console. + - Navigate to **Fleet Management** > **Targets** section. + - Click **Add new target** button. + - Select **Microsoft Sentinel** target. + +2. **Configure General Settings**: + - **Name**: Enter a name for your target (e.g., "cus01-ms-sentinel") + - **Description**: Optionally provide a description for the target configuration + +3. **Configure Azure Authentication** (choose based on Step 1): + + **For Service Principal Authentication:** + - **Managed Identity for Azure**: Keep **Disabled** + - **Tenant ID**: Enter the Directory (tenant) ID from Step 1 + - **Client ID**: Enter the Application (client) ID from Step 1 + - **Client Secret**: Enter the client secret value from Step 1 + + **For Azure Managed Identity:** + - **Managed Identity for Azure**: Set to **Enabled** + +4. 
**Configure Stream Properties**: + - **Endpoint**: Choose your configuration method: + - **For manual stream configuration**: Enter the DCE Logs Ingestion URI (format: `https://..ingest.monitor.azure.com`) + - **For auto stream detection**: Enter the DCE Resource ID (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`) + - **Streams**: Select **Auto** for automatic stream detection, or configure specific streams if needed + +5. **Verify Data Ingestion in Microsoft Sentinel**: + - Return to your **Log Analytics Workspace** + - Run sample queries on the ASIM tables to confirm data is being received: + ```kql + ASimNetworkSessionLogs + | where TimeGenerated > ago(1h) + | take 10 + ``` + - Check the **Microsoft Sentinel Overview** dashboard for new data sources and event counts. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/virtualmetricmssentineldatalakeconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/virtualmetricmssentineldatalakeconnector.md index 5eba2c2b33d..b8bcba9cccf 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/virtualmetricmssentineldatalakeconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/virtualmetricmssentineldatalakeconnector.md @@ -10,4 +10,175 @@ VirtualMetric DataStream connector deploys Data Collection Rules to ingest security telemetry into Microsoft Sentinel data lake. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. + +**Custom Permissions:** +- **App Registration or Azure Managed Identity**: VirtualMetric DataStream requires an Entra ID identity to authenticate and send logs to Microsoft Sentinel data lake. 
You can choose between creating an App Registration with Client ID and Client Secret, or using Azure Managed Identity for enhanced security without credential management. +- **Resource Group Role Assignment**: The chosen identity (App Registration or Managed Identity) must be assigned to the resource group containing the Data Collection Endpoint with the following roles: Monitoring Metrics Publisher (for log ingestion) and Monitoring Reader (for reading stream configuration). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure VirtualMetric DataStream for Microsoft Sentinel data lake** + +Configure the VirtualMetric DataStream for Microsoft Sentinel data lake to send data. +**Register Application in Microsoft Entra ID (Optional)** + + **Choose your authentication method:** + +**Option A: Use Azure Managed Identity (Recommended)** +- Skip this step if you plan to use Azure Managed Identity for authentication. +- Azure Managed Identity provides a more secure authentication method without managing credentials. + +**Option B: Register a Service Principal Application** + +1. **Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**: + - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab. + - Ensure you are logged in with an account that has **Application Administrator** or **Global Administrator** permissions. + +2. **Create a New Application**: + - In the **Microsoft Entra ID portal**, select **App registrations** from the left-hand navigation. + - Click on **+ New registration**. + - Fill out the following fields: + - **Name**: Enter a descriptive name for the app (e.g., "VirtualMetric ASIM Connector"). 
+ - **Supported account types**: Choose **Accounts in this organizational directory only** (Single tenant). + - **Redirect URI**: Leave this blank. + - Click **Register** to create the application. + +3. **Copy Application and Tenant IDs**: + - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You'll need these for VirtualMetric DataStream configuration. + +4. **Create a Client Secret**: + - In the **Certificates & secrets** section, click **+ New client secret**. + - Add a description (e.g., 'VirtualMetric ASIM Secret') and set an appropriate expiration period. + - Click **Add**. + - **Copy the client secret value immediately**, as it will not be shown again. Store this securely for VirtualMetric DataStream configuration. + + **Assign Required Permissions** + + Assign the required roles to your chosen authentication method (Service Principal or Managed Identity) in the resource group. + +**For Service Principal (if you completed Step 1):** + +1. **Navigate to Your Resource Group**: + - Open the **Azure Portal** and navigate to the **Resource Group** that contains your **Log Analytics Workspace** and where **Data Collection Rules (DCRs)** will be deployed. + +2. **Assign the Monitoring Metrics Publisher Role**: + - In the **Resource Group**, click on **Access control (IAM)** from the left-hand menu. + - Click **+ Add** and select **Add role assignment**. + - In the **Role** tab, search for and select **Monitoring Metrics Publisher**. + - Click **Next** to go to the **Members** tab. + - Under **Assign access to**, select **User, group, or service principal**. + - Click **+ Select members** and search for your registered application by name or client ID. + - Select your application and click **Select**. + - Click **Review + assign** twice to complete the assignment. + +3. 
**Assign the Monitoring Reader Role**: + - Repeat the same process to assign the **Monitoring Reader** role: + - Click **+ Add** and select **Add role assignment**. + - In the **Role** tab, search for and select **Monitoring Reader**. + - Follow the same member selection process as above. + - Click **Review + assign** twice to complete the assignment. + +**For Azure Managed Identity:** + +1. **Create or Identify Your Managed Identity**: + - If using **System-assigned Managed Identity**: Enable it on your Azure resource (VM, App Service, etc.). + - If using **User-assigned Managed Identity**: Create one in your resource group if it doesn't exist. + +2. **Assign the Monitoring Metrics Publisher Role**: + - Follow the same steps as above, but in the **Members** tab: + - Under **Assign access to**, select **Managed identity**. + - Click **+ Select members** and choose the appropriate managed identity type and select your identity. + - Click **Select**, then **Review + assign** twice to complete. + +3. **Assign the Monitoring Reader Role**: + - Repeat the process to assign the **Monitoring Reader** role to the same managed identity. + +**Required Permission Summary:** +The assigned roles provide the following capabilities: +- **Monitoring Metrics Publisher**: Write data to Data Collection Endpoints (DCE) and send telemetry through Data Collection Rules (DCR) +- **Monitoring Reader**: Read stream configuration and access Log Analytics workspace for ASIM table ingestion + + **Deploy Azure Infrastructure** + + Deploy the required Data Collection Endpoint (DCE) and Data Collection Rules (DCR) for Microsoft Sentinel data lake tables using our ARM template. + +1. 
**Deploy to Azure**: + - Click the Deploy to Azure button below to automatically deploy the required infrastructure: + - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVirtualMetric%2520DataStream%2FData%2520Connectors%2FVirtualMetric-SentinelDataLake%2FDeployToAzure.json) + - This will take you directly to the Azure portal to start the deployment. + +2. **Configure Deployment Parameters**: + - On the custom deployment page, configure the following settings: + + **Project details:** + - **Subscription**: Select your Azure subscription from the dropdown + - **Resource group**: Select an existing resource group or click **Create new** to create a new one + + **Instance details:** + - **Region**: Select the Azure region where your Log Analytics workspace is located (e.g., West Europe) + - **Workspace**: Enter your Log Analytics workspace name + - **DCE Name**: Provide a name for the Data Collection Endpoint (e.g., "vmetric-dce") + - **DCR Name Prefix**: Provide a prefix for the Data Collection Rules (e.g., "vmetric-dcr") + +3. **Complete the Deployment**: + - Click **Review + create** to validate the template. + - Review the parameters and click **Create** to deploy the resources. + - Wait for the deployment to complete (typically takes 2-5 minutes). + +4. 
**Verify Deployed Resources**: + - After deployment, verify the following resources were created: + - **Data Collection Endpoint (DCE)**: Check **Azure Portal > Monitor > Data Collection Endpoints** + - **Data Collection Rules (DCRs)**: Check **Azure Portal > Monitor > Data Collection Rules** + - **Copy the DCE Logs Ingestion URI** from the DCE **Overview** page (format: `https://..ingest.monitor.azure.com`) + - **Copy the DCE Resource ID** from the DCE **Overview** page (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`) + - For each DCR, note the **Immutable ID** from the **Overview** page - you'll need these for VirtualMetric DataStream configuration. + + **Configure VirtualMetric DataStream Integration** + + Set up VirtualMetric DataStream to send security telemetry to Microsoft Sentinel data lake tables. + +1. **Access VirtualMetric DataStream Configuration**: + - Log into your **VirtualMetric DataStream** management console. + - Navigate to **Fleet Management** > **Targets** section. + - Click **Add new target** button. + - Select **Microsoft Sentinel** target. + +2. **Configure General Settings**: + - **Name**: Enter a name for your target (e.g., "cus01-ms-sentinel") + - **Description**: Optionally provide a description for the target configuration + +3. **Configure Azure Authentication** (choose based on Step 1): + + **For Service Principal Authentication:** + - **Managed Identity for Azure**: Keep **Disabled** + - **Tenant ID**: Enter the Directory (tenant) ID from Step 1 + - **Client ID**: Enter the Application (client) ID from Step 1 + - **Client Secret**: Enter the client secret value from Step 1 + + **For Azure Managed Identity:** + - **Managed Identity for Azure**: Set to **Enabled** + +4. 
**Configure Stream Properties**: + - **Endpoint**: Choose your configuration method: + - **For manual stream configuration**: Enter the DCE Logs Ingestion URI (format: `https://..ingest.monitor.azure.com`) + - **For auto stream detection**: Enter the DCE Resource ID (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`) + - **Streams**: Select **Auto** for automatic stream detection, or configure specific streams if needed + +5. **Verify Data Ingestion in Microsoft Sentinel data lake**: + - Return to your **Log Analytics Workspace** + - Run sample queries on the ASIM tables to confirm data is being received: + ```kql + ASimNetworkSessionLogs + | where TimeGenerated > ago(1h) + | take 10 + ``` + - Check the **Microsoft Sentinel Overview** dashboard for new data sources and event counts. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/vmray.md b/Tools/Solutions Analyzer/connector-docs/connectors/vmray.md index 43f9bb13420..0b52e029691 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/vmray.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/vmray.md @@ -10,4 +10,40 @@ VMRayThreatIntelligence connector automatically generates and feeds threat intelligence for all submissions to VMRay, improving threat detection and incident response in Sentinel. This seamless integration empowers teams to proactively address emerging threats. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Azure Subscription**: An Azure Subscription with the Owner role is required to register an application in Azure Active Directory and to assign the Contributor role to the app in the resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **VMRay API Key** is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the VMRay API to pull VMRay Threat IOCs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**1. Deploy VMRay Threat Intelligence Connector** + +1. 
Ensure you have all the required prerequisites: **Client ID**, **Tenant ID**, **Client Secret**, **VMRay API Key**, and **VMRay Base URL**. +2. To obtain the Client ID, Client Secret, and Tenant ID, [follow these instructions](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/VMRay#vmray-configurations). +3. For the **Flex Consumption Plan**, click the **Deploy to Azure** button below: + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VMRay-azuredeployflex) + +4. For the **Premium Plan**, click the **Deploy to Azure** button below: + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VMRay-azuredeploypremium) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/vmwarecarbonblack.md b/Tools/Solutions Analyzer/connector-docs/connectors/vmwarecarbonblack.md index 8880b548eef..35539fbd170 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/vmwarecarbonblack.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/vmwarecarbonblack.md @@ -10,4 +10,101 @@ The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) connector provides the capability to ingest Carbon Black data into Microsoft Sentinel. The connector provides visibility into Audit, Notification and Event logs in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **VMware Carbon Black API Key(s)**: Carbon Black API and/or SIEM Level API Key(s) are required. See the documentation to learn more about the [Carbon Black API](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/). + - A Carbon Black **API** access level API ID and Key is required for [Audit](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#audit-log-events) and [Event](https://developer.carbonblack.com/reference/carbon-black-cloud/platform/latest/data-forwarder-config-api/) logs. + - A Carbon Black **SIEM** access level API ID and Key is required for [Notification](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#notifications) alerts. +- **Amazon S3 REST API Credentials/permissions**: **AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name**, **Folder Name in AWS S3 Bucket** are required for Amazon S3 REST API. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to VMware Carbon Black to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the VMware Carbon Black API** + + [Follow these instructions](https://developer.carbonblack.com/reference/carbon-black-cloud/authentication/#creating-an-api-key) to create an API Key. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the VMware Carbon Black connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the VMware Carbon Black API Authorization Key(s), readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +This method provides an automated deployment of the VMware Carbon Black connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelcarbonblackazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelcarbonblackazuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **Log Types**, **API ID(s)**, **API Key(s)**, **Carbon Black Org Key**, **S3 Bucket Name**, **AWS Access Key Id**, **AWS Secret Access Key**, **EventPrefixFolderName**, **AlertPrefixFolderName**, and validate the **URI**. +> - Enter the URI that corresponds to your region. 
The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346) + - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. + - Carbon Black requires a separate set of API ID/Keys to ingest Notification alerts. Enter the SIEM API ID/Key values or leave blank, if not required. +> - Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the VMware Carbon Black connector manually with Azure Functions. + +**1. Create a Function App** + +1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**. +2. In the **Basics** tab, ensure Runtime stack is set to **PowerShell Core**. +3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected. +4. Make other preferable configuration changes, if needed, then click **Create**. + +**2. Import Function App Code** + +1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**. +2. Select **Timer Trigger**. +3. Enter a unique Function **Name** and modify the cron schedule, if needed. The default value is set to run the Function App every 5 minutes.
(Note: the Timer trigger should match the `timeInterval` value below to prevent overlapping data), click **Create**. +4. Click on **Code + Test** on the left pane. +5. Copy the [Function App Code](https://aka.ms/sentinelcarbonblackazurefunctioncode) and paste into the Function App `run.ps1` editor. +6. Click **Save**. + +**3. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following thirteen to sixteen (13-16) application settings individually, with their respective string values (case-sensitive): + apiId + apiKey + workspaceID + workspaceKey + uri + timeInterval + CarbonBlackOrgKey + CarbonBlackLogTypes + s3BucketName + EventPrefixFolderName + AlertPrefixFolderName + AWSAccessKeyId + AWSSecretAccessKey + SIEMapiId (Optional) + SIEMapiKey (Optional) + logAnalyticsUri (optional) +> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346). The `uri` value must follow the following schema: `https://.conferdeploy.net` - There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format. +> - Set the `timeInterval` (in minutes) to the default value of `5` to correspond to the default Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion. +> - Carbon Black requires a separate set of API ID/Keys to ingest Notification alerts. Enter the `SIEMapiId` and `SIEMapiKey` values, if needed, or omit, if not required. +> - Note: If using Azure Key Vault, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values.
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us` +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/vmwareesxi.md b/Tools/Solutions Analyzer/connector-docs/connectors/vmwareesxi.md index 5cc05d4af9f..55b3de4cdb2 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/vmwareesxi.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/vmwareesxi.md @@ -10,4 +10,50 @@ The [VMware ESXi](https://www.vmware.com/products/esxi-and-esx.html) connector allows you to easily connect your VMWare ESXi logs with Microsoft Sentinel This gives you more insight into your organization's ESXi servers and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **VMwareESXi**: must be configured to export logs via Syslog + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias VMwareESXi and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMWareESXi/Parsers/VMwareESXi.yaml), on the second line of the query, enter the hostname(s) of your VMwareESXi device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the VMware ESXi** + +1. Follow these instructions to configure the VMWare ESXi to forward syslog: + - [VMware ESXi 3.5 and 4.x](https://kb.vmware.com/s/article/1016621) + - [VMware ESXi 5.0+](https://docs.vmware.com/en/VMware-vSphere/5.5/com.vmware.vsphere.monitoring.doc/GUID-9F67DB52-F469-451F-B6C8-DAE8D95976E7.html) +2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/vmwaresdwan.md b/Tools/Solutions Analyzer/connector-docs/connectors/vmwaresdwan.md index 406908c759f..6d2c5beb075 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/vmwaresdwan.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/vmwaresdwan.md @@ -10,4 +10,93 @@ The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **api_veco_authorization**, **api_veco_fqdn** is required for REST API. [See the documentation to learn more about VMware SASE APIs](https://developer.vmware.com/apis/vmware-sase-platform/). 
Check all [requirements and follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) for obtaining credentials. The Function App only supports token-based API authentication. Be advised that the API Token generated will inherit the access rights of the user account under which it was generated. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the VMware Edge Cloud Orchestrator REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +**STEP 1 - Configuration steps for the VECO API** + + [Follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) to create and obtain the credentials. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function.** + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the VMware SD-WAN and SASE Connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelvmwaresdwan) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3.
Enter or modify the Function App, Log Analytics and Azure Monitor settings, enter the VECO FQDN (without https://, for example vco123-usvi1.velocloud.net), enter the API token created (including "Token " at the beginning of the string), and adjust your desired Function App frequency, then deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the VMware SD-WAN and SASE Connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS Code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-vmwaresdwan-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. vmwsase-siemXXXXXXXXXXXXX). + + e.
**Select a runtime:** Choose Python 3.10. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. Open the **Application settings** tab. +3. Check if the application has these settings defined correctly and adjust if needed: + api_veco_authorization + api_veco_fqdn + app_frequency_mins + azsa_share_connectionstring + azsa_share_name + dce_endpoint + dcr_cwsdlplog_immutableid + dcr_cwshealth_immutableid + dcr_cwsweblog_immutableid + dcr_efsfwlog_immutableid + dcr_efshealth_immutableid + dcr_saseaudit_immutableid + stream_cwsdlplog + stream_cwshealth + stream_cwsweblog + stream_efsfwlog + stream_efshealth + stream_saseaudit +4. If you made changes to the application settings, make sure that you click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/vmwarevcenter.md b/Tools/Solutions Analyzer/connector-docs/connectors/vmwarevcenter.md index 8907edc5b84..2f147e983b4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/vmwarevcenter.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/vmwarevcenter.md @@ -10,4 +10,85 @@ The [vCenter](https://www.vmware.com/in/products/vcenter-server.html) connector allows you to easily connect your vCenter server logs with Microsoft Sentinel. This gives you more insight into your organization's data centers and improves your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required.
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Include custom pre-requisites if the connectivity requires - else delete customs**: Description for any custom pre-requisite + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias VMware vCenter and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20vCenter/Parsers/vCenter.txt), on the second line of the query, enter the hostname(s) of your VMware vCenter device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. +> 1. If you have not installed the vCenter solution from ContentHub then [Follow the steps](https://aka.ms/sentinel-vCenter-parser) to use the Kusto function alias, **vCenter** + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. 
+ - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Follow the configuration steps below to get vCenter server logs into Microsoft Sentinel. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps. + For vCenter Server logs, we have issues while parsing the data collected by the OMS agent using default settings. +So we advise to capture the logs into custom table **vcenter_CL** using the instructions below. +1. Log in to the server where you have installed the OMS agent. +2. Download config file vCenter.conf + wget -v https://aka.ms/sentinel-vcenteroms-conf -O vcenter.conf +3. Copy vcenter.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. + cp vcenter.conf /etc/opt/microsoft/omsagent/<>/conf/omsagent.d/ +4. Edit vcenter.conf as follows: + + a. vcenter.conf uses the port **22033** by default. Ensure this port is not being used by any other source on your server + + b. If you would like to change the default port for **vcenter.conf** make sure that you don't use default Azure monitoring / Log Analytics agent ports (for example, CEF uses TCP port **25226** or **25224**) + + c. replace **workspace_id** with real value of your Workspace ID (lines 13,14,15,18) +5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command: + sudo /opt/microsoft/omsagent/bin/service_control restart +6. Modify /etc/rsyslog.conf file - add below template preferably at the beginning / before directives section + + $template vcenter,"%timestamp% %hostname% %msg%\ n" + + **Note - There is no space between slash(\\) and character 'n' in above command.** + + 7.
Create a custom conf file in /etc/rsyslog.d/ for example 10-vcenter.conf and add following filter conditions. + +Download config file [10-vCenter.conf](https://aka.ms/sentinel-vcenter-conf) + + With an added statement you will need to create a filter which will specify the logs coming from the vcenter server to be forwarded to the custom table. + + reference: [Filter Conditions — rsyslog 8.18.0.master documentation](https://rsyslog.readthedocs.io/en/latest/configuration/filters.html) + + Here is an example of filtering that can be defined, this is not complete and will require additional testing for each installation. + if $rawmsg contains "vcenter-server" then @@127.0.0.1:22033;vcenter + & stop + if $rawmsg contains "vpxd" then @@127.0.0.1:22033;vcenter + & stop + +8. Restart rsyslog + systemctl restart rsyslog +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Configure and connect the vCenter device(s)** + +[Follow these instructions](https://docs.vmware.com/en/VMware-vSphere/7.0/com.vmware.vsphere.monitoring.doc/GUID-9633A961-A5C3-4658-B099-B81E0512DC21.html) to configure the vCenter to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/votiro.md b/Tools/Solutions Analyzer/connector-docs/connectors/votiro.md index 301757baa02..5b85c21d454 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/votiro.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/votiro.md @@ -10,4 +10,57 @@ The Votiro data connector allows you to easily connect your Votiro Event logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
Using Votiro on Microsoft Sentinel will provide you more insights into the sanitization results of files. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. 
Forward Common Event Format (CEF) logs to Syslog agent** + +Set Votiro Endpoints to send Syslog messages in CEF format to the Forwarder machine. Make sure you to send the logs to port 514 TCP on the Forwarder machine's IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/waf.md b/Tools/Solutions Analyzer/connector-docs/connectors/waf.md index de1d026a08d..243a909875a 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/waf.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/waf.md @@ -10,4 +10,31 @@ Connect to the Azure Web Application Firewall (WAF) for Application Gateway, Front Door, or CDN. This WAF protects your applications from common web vulnerabilities such as SQL injection and cross-site scripting, and lets you customize rules to reduce false positives. Follow these instructions to stream your Microsoft Web application firewall logs into Microsoft Sentinel. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223546&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Azure WAF to Microsoft Sentinel** + +Go to each WAF resource type and choose your WAF. +- **Configure Web Application Firewall** +- **Configure Azure Front Door** +- **Configure CDN Profile** + +Inside your WAF resource: + +1. Select **Diagnostic logs.​** +2. Select **+ Add diagnostic setting.​** +3. In the **Diagnostic setting** blade: + - Type a **Name**. + - Select **Send to Log Analytics**. + - Choose the log destination workspace.​ + - Select the categories that you want to analyze (recommended: ApplicationGatewayAccessLog, ApplicationGatewayFirewallLog, FrontdoorAccessLog, FrontdoorWebApplicationFirewallLog, WebApplicationFirewallLogs).​ + - Click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/watchguardfirebox.md b/Tools/Solutions Analyzer/connector-docs/connectors/watchguardfirebox.md index 0ba32ece3cf..0330699dfe9 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/watchguardfirebox.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/watchguardfirebox.md @@ -10,4 +10,41 @@ WatchGuard Firebox (https://www.watchguard.com/wgrd-products/firewall-appliances and https://www.watchguard.com/wgrd-products/cloud-and-virtual-firewalls) is security products/firewall-appliances. Watchguard Firebox will send syslog to Watchguard Firebox collector agent.The agent then sends the message to the workspace. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias WatchGuardFirebox and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchguard%20Firebox/Parsers/WatchGuardFirebox.txt) on the second line of the query, enter the hostname(s) of your WatchGuard Firebox device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. 
+- **Open Syslog settings** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/windowsfirewall.md b/Tools/Solutions Analyzer/connector-docs/connectors/windowsfirewall.md index dd762cbb9e2..f1e02d7a241 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/windowsfirewall.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/windowsfirewall.md @@ -10,4 +10,32 @@ Windows Firewall is a Microsoft Windows application that filters information coming to your system from the Internet and blocking potentially harmful programs. The software blocks most programs from communicating through the firewall. Users simply add a program to the list of allowed programs to allow it to communicate through the firewall. When using a public network, Windows Firewall can also secure the system by blocking all unsolicited attempts to connect to your computer. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219791&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Solutions** (ResourceGroup): [read and write permissions](https://docs.microsoft.com/azure/role-based-access-control/built-in-roles#log-analytics-contributor). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Download and install the agent** + +> Windows Firewall logs are collected only from **Windows** agents. +**Choose where to install the agent:** + +**Install agent on Azure Windows Virtual Machine** + + Download the agent on the relevant machine and follow the instructions. 
+ - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on non-Azure Windows Machine** + + Select the machine to install the agent and then click **Connect**. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Install Windows Firewall solution** +- Install solution: WindowsFirewall + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/windowsfirewallama.md b/Tools/Solutions Analyzer/connector-docs/connectors/windowsfirewallama.md index 4a254be273b..ec25b47f608 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/windowsfirewallama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/windowsfirewallama.md @@ -22,4 +22,22 @@ For more information, see the following articles: - [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2228623&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci) +## Permissions + +**Resource Provider Permissions:** +- **Workspace data sources** (Workspace): read and write permissions. + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Enable data collection rule** + +> Windows Firewall events are collected only from Windows agents. 
+- Configure WindowsFirewallAma data connector + +- **Create data collection rule** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/windowsforwardedevents.md b/Tools/Solutions Analyzer/connector-docs/connectors/windowsforwardedevents.md index 49d0d36e1a5..7770416d32d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/windowsforwardedevents.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/windowsforwardedevents.md @@ -14,4 +14,22 @@ You can stream all Windows Event Forwarding (WEF) logs from the Windows Servers This gives you more insight into your organization’s network and improves your security operation capabilities. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219963&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace data sources** (Workspace): read and write permissions. + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Enable data collection rule​** + +> Windows Forwarded Events logs are collected only from **Windows** agents. 
+- Configure WindowsForwardedEvents data connector +- **Create data collection rule** +- **Install/configure: OpenCustomDeploymentBlade** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/windowssecurityevents.md b/Tools/Solutions Analyzer/connector-docs/connectors/windowssecurityevents.md index 11b6fe1c276..b6c27e88732 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/windowssecurityevents.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/windowssecurityevents.md @@ -10,4 +10,22 @@ You can stream all security events from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220225&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +## Permissions + +**Resource Provider Permissions:** +- **Workspace data sources** (Workspace): read and write permissions. + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Enable data collection rule​** + +> Security Events logs are collected only from **Windows** agents. 
+- Configure WindowsSecurityEvents data connector + +- **Create data collection rule** + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/wirex-systems-nfp.md b/Tools/Solutions Analyzer/connector-docs/connectors/wirex-systems-nfp.md index b8579cbcdfc..1fe424e66af 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/wirex-systems-nfp.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/wirex-systems-nfp.md @@ -10,4 +10,57 @@ The WireX Systems data connector allows security professional to integrate with Microsoft Sentinel to allow you to further enrich your forensics investigations; to not only encompass the contextual content offered by WireX but to analyze data from other sources, and to create custom dashboards to give the most complete picture during a forensic investigation and to create custom workflows. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. 
+
+> Notice that the data from all regions will be stored in the selected workspace
+**1.1 Select or create a Linux machine**
+
+ Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.
+
+ **1.2 Install the CEF collector on the Linux machine**
+
+ Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.
+
+> 1. Make sure that you have Python on your machine using the following command: python -version.
+
+> 2. You must have elevated permissions (sudo) on your machine.
+ - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}`
+**2. Forward Common Event Format (CEF) logs to Syslog agent**
+
+Contact WireX support (https://wirexsystems.com/contact-us/) in order to configure your NFP solution to send Syslog messages in CEF format to the proxy machine. Make sure that the central manager can send the logs to port 514 TCP on the machine's IP address.
+
+**3. Validate connection**
+
+Follow the instructions to validate your connectivity:
+
+Open Log Analytics to check if the logs are received using the CommonSecurityLog schema.
+
+>It may take about 20 minutes until the connection streams data to your workspace.
+
+If the logs are not received, run the following connectivity validation script:
+
+> 1. Make sure that you have Python on your machine using the following command: python -version
+
+>2. 
You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/wirex-systems-nfpama.md b/Tools/Solutions Analyzer/connector-docs/connectors/wirex-systems-nfpama.md index 1993b2df85e..f3a7c22436e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/wirex-systems-nfpama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/wirex-systems-nfpama.md @@ -10,4 +10,61 @@ The WireX Systems data connector allows security professional to integrate with Microsoft Sentinel to allow you to further enrich your forensics investigations; to not only encompass the contextual content offered by WireX but to analyze data from other sources, and to create custom dashboards to give the most complete picture during a forensic investigation and to create custom workflows. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+
+Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.
+
+> Notice that the data from all regions will be stored in the selected workspace
+**1. Kindly follow the steps to configure the data connector**
+
+**Step A. Configure the Common Event Format (CEF) via AMA data connector**
+
+ _Note:- CEF logs are collected only from Linux Agents_
+
+1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .
+
+2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.
+
+3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)
+
+ _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_
+
+4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine
+
+ **Step B. Forward Common Event Format (CEF) logs to Syslog agent**
+
+ Contact WireX support (https://wirexsystems.com/contact-us/) in order to configure your NFP solution to send Syslog messages in CEF format to the proxy machine. Make sure that the central manager can send the logs to port 514 TCP on the machine's IP address.
+
+ **Step C. Validate connection**
+
+ Follow the instructions to validate your connectivity:
+
+Open Log Analytics to check if the logs are received using the CommonSecurityLog schema.
+
+It may take about 20 minutes until the connection streams data to your workspace.
+
+If the logs are not received, run the following connectivity validation script:
+
+ 1. Make sure that you have Python on your machine using the following command: python -version
+
+2. 
You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/withsecureelementsviaconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/withsecureelementsviaconnector.md index 04fa478c88b..4e15b039e25 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/withsecureelementsviaconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/withsecureelementsviaconnector.md @@ -16,4 +16,79 @@ It requires deploying "Elements Connector" either on-prem or in cloud. The Common Event Format (CEF) provides natively search & correlation, alerting and threat intelligence enrichment for each data log. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. 
+
+> Notice that the data from all regions will be stored in the selected workspace
+**1.1 Select or create a Linux machine**
+
+ Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your WithSecure solution and Sentinel. The machine can be on-prem environment, Microsoft Azure or other cloud based.
+> Linux needs to have `syslog-ng` and `python`/`python3` installed.
+
+ **1.2 Install the CEF collector on the Linux machine**
+
+ Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.
+
+> 1. Make sure that you have Python on your machine using the following command: python -version.
+
+> 2. You must have elevated permissions (sudo) on your machine.
+ - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}`
+ - **For python3 use command below:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python3 cef_installer.py {0} {1}`
+**2. Forward data from WithSecure Elements Connector to Syslog agent**
+
+This describes how to install and configure Elements Connector step by step.
+**2.1 Order Connector subscription**
+
+ If Connector subscription has not been ordered yet go to EPP in Elements Portal. Then navigate to Downloads and in Elements Connector section click 'Create subscription key' button. You can check Your subscription key in Subscriptions.
+
+ **2.2 Download Connector**
+
+ Go to Downloads and in WithSecure Elements Connector section select correct installer.
+
+ **2.3 Create management API key**
+
+ When in EPP open account settings in top right corner. 
Then select Get management API key. If key has been created earlier it can be read there as well. + + **2.4 Install Connector** + + To install Elements Connector follow [Elements Connector Docs](https://www.withsecure.com/userguides/product.html#business/connector/latest/en/). + + **2.5 Configure event forwarding** + + If api access has not been configured during installation follow [Configuring API access for Elements Connector](https://www.withsecure.com/userguides/product.html#business/connector/latest/en/task_F657F4D0F2144CD5913EE510E155E234-latest-en). +Then go to EPP, then Profiles, then use For Connector from where you can see the connector profiles. Create a new profile (or edit an existing not read-only profile). In Event forwarding enable it. SIEM system address: **127.0.0.1:514**. Set format to **Common Event Format**. Protocol is **TCP**. Save profile and assign it to Elements Connector in Devices tab. +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` +- **For python3 use command below:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python3 cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/withsecureelementsviafunction.md b/Tools/Solutions Analyzer/connector-docs/connectors/withsecureelementsviafunction.md index f4f036569f2..d48486ee6c3 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/withsecureelementsviafunction.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/withsecureelementsviafunction.md @@ -30,4 +30,47 @@ With this solution Azure Function is deployed to your tenant, polling periodical For more information visit our website at: [https://www.withsecure.com](https://www.withsecure.com). +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **WithSecure Elements API client credentials**: Client credentials are required. [See the documentation to learn more.](https://connect.withsecure.com/getting-started/elements#getting-client-credentials) + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Create WithSecure Elements API credentials** + +Follow the [user guide](https://connect.withsecure.com/getting-started/elements#getting-client-credentials) to create Elements API credentials. Save credentials in a safe place. + +**2. 
Create Microsoft Entra application** + +Create new Microsoft Entra application and credentials. Follow [the instructions](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-microsoft-entra-application) and store values of **Directory (tenant) ID**, **Object ID**, **Application (client) ID** and **Client Secret** (from client credentials field). Remember to store Client Secret in a safe place. + +**3. Deploy Function App** + +>**NOTE:** This connector uses Azure Functions to pull logs from WithSecure Elements. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store Microsoft Entra client credentials and WithSecure Elements API client credentials in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**IMPORTANT:** Before deploying the WithSecure Elements connector, have the Workspace Name (can be copied from the following), data from Microsoft Entra (Directory (tenant) ID, Object ID, Application (client) ID and Client Secret), as well as the WithSecure Elements client credentials, readily available. +- **Workspace Name**: `workspaceName` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**1. Deploy all the resources related to the connector** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-WithSecureElementsViaFunction-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. 
Enter the **Workspace ID**, **Entra Client ID**, **Entra Client Secret**, **Entra Tenant ID**, **Elements API Client ID**, **Elements API Client Secret**. +>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. You can also fill in optional fields: **Elements API url**, **Engine**, **Engine Group**. Use default value of **Elements API url** unless you have some special case. **Engine** and **Engine Group** map to [security events request parameters](https://connect.withsecure.com/api-reference/elements#post-/security-events/v1/security-events), fill in those parameters if you are interested only in events from specific engine or engine group, in case you want to receive all security events leave the fields with default values. +5. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +6. Click **Purchase** to deploy. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/wiz.md b/Tools/Solutions Analyzer/connector-docs/connectors/wiz.md index 0296bf42b25..3c0569869f4 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/wiz.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/wiz.md @@ -10,4 +10,54 @@ The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+
+**Custom Permissions:**
+- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).
+- **Wiz Service Account credentials**: Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).
+
+## Setup Instructions
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+>**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.
+Creates an Azure Key Vault with all the required parameters stored as secrets.
+
+**1. STEP 1 - Get your Wiz credentials**
+
+Follow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the required credentials.
+
+**2. STEP 2 - Deploy the connector and the associated Azure Function**
+
+>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1: Deploy using the Azure Resource Manager (ARM) Template** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the following parameters: +> - Choose **KeyVaultName** and **FunctionName** for the new resources + >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** +>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey** +>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**. + +>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**. + +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2: Manual Deployment of the Azure Function** + +>Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/workdayccpdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/workdayccpdefinition.md index fbd1871d08f..de6fdf8e813 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/workdayccpdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/workdayccpdefinition.md @@ -10,4 +10,36 @@ The [Workday](https://www.workday.com/) User Activity data connector provides the capability to ingest User Activity Logs from [Workday API](https://community.workday.com/sites/default/files/file-hosting/restapi/index.html#privacy/v1/get-/activityLogging) into Microsoft Sentinel. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Workday User Activity API access**: Access to the Workday user activity API through Oauth are required. The API Client needs to have the scope: System and it needs to be authorized by an account with System Auditing permissions. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect to Workday to start collecting user activity logs in Microsoft Sentinel** + +1) In Workday, access the "Edit Tenant Setup - Security" task, verify "OAuth 2.0 Settings" section, make sure that the "OAuth 2.0 Clients Enabled" check box is ticked. + 2) In Workday, access the "Edit Tenant Setup - System" task, verify "User Activity Logging" section, make sure that the "Enable User Activity Logging" check box is ticked. + 3) In Workday, access the "Register API Client" task. 
+ 4) Define the Client Name, select the "Client Grant Type": "Authorization Code Grant" and then select "Access Token Type": "Bearer" + 5) Enter the "Redirection URI": https://portal.azure.com/TokenAuthorize/ExtensionName/Microsoft_Azure_Security_Insights + 6) In section "Scope (Functional Areas)", select "System" and click OK at the bottom + 7) Copy the Client ID and Client Secret before navigating away from the page, and store it securely. + 8) In Sentinel, in the connector page - provide required Token, Authorization and User Activity Logs Endpoints, along with Client ID and Client Secret from previous step. Then click "Connect". + 9) A Workday pop up will appear to complete the OAuth2 authentication and authorization of the API client. Here you need to provide credentials for Workday account with "System Auditing" permissions in Workday (can be either Workday account or Integration System User). + 10) Once that's complete, the message will be displayed to authorize your API client +- **Token Endpoint**: https://wd2-impl-services1.workday.com/ccx/oauth2/{tenantName}/token +- **Authorization Endpoint**: https://impl.workday.com/{tenantName}/authorize +- **User Activity Logs Endpoint, it ends with /activityLogging **: https://wd2-impl-services1.workday.com/ccx/api/privacy/v1/{tenantName}/activityLogging +- **OAuth Configuration**: + - Client ID + - Client Secret + - Click 'Connect' to authenticate + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/workplacefacebook.md b/Tools/Solutions Analyzer/connector-docs/connectors/workplacefacebook.md index 561c72ef687..af61a746a7e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/workplacefacebook.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/workplacefacebook.md @@ -10,4 +10,87 @@ The [Workplace](https://www.workplace.com/) data connector provides the capability to ingest common Workplace events into Microsoft Sentinel through 
Webhooks. Webhooks enable custom integration apps to subscribe to events in Workplace and receive updates in real time. When a change occurs in Workplace, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.facebook.com/docs/workplace/reference/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Webhooks Credentials/permissions**: WorkplaceAppSecret, WorkplaceVerifyToken, Callback URL are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://developers.facebook.com/docs/workplace/reference/webhooks), [configuring permissions](https://developers.facebook.com/docs/workplace/reference/permissions). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.
+
+>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.
+
+>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias WorkplaceFacebook and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workplace%20from%20Facebook/Parsers/Workplace_Facebook.txt) on the second line of the query, enter the hostname(s) of your Workplace Facebook device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.
+
+**STEP 1 - Configuration steps for the Workplace**
+
+ Follow the instructions to configure Webhooks.
+
+1. Log in to the Workplace with Admin user credentials.
+2. In the Admin panel, click **Integrations**.
+3. In the **All integrations** view, click **Create custom integration**
+4. Enter the name and description and click **Create**.
+5. In the **Integration details** panel show **App secret** and copy.
+6. In the **Integration permissions** panel set all read permissions. Refer to [permission page](https://developers.facebook.com/docs/workplace/reference/permissions) for details.
+7. Now proceed to STEP 2 to follow the steps (listed in Option 1 or 2) to Deploy the Azure Function.
+8. Enter the requested parameters and also enter a Token of choice. 
Copy this Token / Note it for the upcoming step. +9. After the deployment of Azure Functions completes successfully, open Function App page, select your app, go to **Functions**, click **Get Function URL** and copy this / Note it for the upcoming step. +10. Go back to Workplace from Facebook. In the **Configure webhooks** panel on each Tab set **Callback URL** as the same value that you copied in point 9 above and Verify token as the same + value you copied in point 8 above which was obtained during STEP 2 of Azure Functions deployment. +11. Click Save. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions** + +>**IMPORTANT:** Before deploying the Workplace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Workplace data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-WorkplaceFacebook-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-WorkplaceFacebook-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **WorkplaceVerifyToken** (can be any expression, copy and save it for STEP 1), **WorkplaceAppSecret** and deploy. +4. 
Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. +6. After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Sophos Endpoint Protection data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-WorkplaceFacebook-functionapp) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select ** New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + WorkplaceAppSecret + WorkplaceVerifyToken + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. 
+ [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/zerofoxalertsdefinition.md b/Tools/Solutions Analyzer/connector-docs/connectors/zerofoxalertsdefinition.md index 5519b7f130d..0129a42ddfc 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/zerofoxalertsdefinition.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/zerofoxalertsdefinition.md @@ -10,4 +10,22 @@ Collects alerts from ZeroFox API. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **ZeroFox Personal Access Token (PAT)**: A ZeroFox PAT is required. You can get it in Data Connectors > [API Data Feeds](https://cloud.zerofox.com/data_connectors/api). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect ZeroFox to Microsoft Sentinel** + +Connect ZeroFox to Microsoft Sentinel +- **Provide your ZeroFox PAT**: (password field) +- Click 'Connect' to establish connection + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/zerofoxctidataconnector.md b/Tools/Solutions Analyzer/connector-docs/connectors/zerofoxctidataconnector.md index 2906f679b11..ecf738428f3 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/zerofoxctidataconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/zerofoxctidataconnector.md @@ -10,4 +10,47 @@ The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel. 
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **ZeroFox API Credentials/permissions**: **ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Retrieval of ZeroFox credentials:** + + Follow these instructions for set up logging and obtain credentials. +1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password +2 - Click into the Settings button and go to the Data Connectors Section. 
3. Select the API DATA FEEDS tab, head to the bottom of the page, and select <> in the API Information box to obtain a Personal Access Token to be used along with your username.
+## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Zero Networks API Token**: **ZeroNetworksAPIToken** is required for REST API. See the API Guide and follow the instructions for obtaining credentials. + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Zero Networks to Microsoft Sentinel** + +Enable Zero Networks audit Logs. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/zimperiummtdalerts.md b/Tools/Solutions Analyzer/connector-docs/connectors/zimperiummtdalerts.md index b3008af1d64..084124d8b6d 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/zimperiummtdalerts.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/zimperiummtdalerts.md @@ -10,4 +10,31 @@ Zimperium Mobile Threat Defense connector gives you the ability to connect the Zimperium threat log with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's mobile threat landscape and enhances your security operation capabilities. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure and connect Zimperium MTD** + +1. In zConsole, click **Manage** on the navigation bar. +2. Click the **Integrations** tab. +3. Click the **Threat Reporting** button and then the **Add Integrations** button. +4. Create the Integration: + - From the available integrations, select Microsoft Sentinel. + - Enter your workspace id and primary key from the fields below, click **Next**. + - Fill in a name for your Microsoft Sentinel integration. + - Select a Filter Level for the threat data you wish to push to Microsoft Sentinel. + - Click **Finish** +5. For additional instructions, please refer to the [Zimperium customer support portal](https://support.zimperium.com). 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/zoom.md b/Tools/Solutions Analyzer/connector-docs/connectors/zoom.md index 28127dec42f..dd3f4bb3105 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/zoom.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/zoom.md @@ -10,4 +10,92 @@ The [Zoom](https://zoom.us/) Reports data connector provides the capability to ingest [Zoom Reports](https://developers.zoom.us/docs/api/rest/reference/zoom-api/methods/#tag/Reports) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developers.zoom.us/docs/api/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **AccountID**, **ClientID** and **ClientSecret** are required for Zoom API. 
[See the documentation to learn more about Zoom API](https://developers.zoom.us/docs/internal-apps/create/). [Follow the instructions for Zoom API configurations](https://aka.ms/sentinel-zoomreports-readme). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Zoom API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Zoom and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZoomReports/Parsers/Zoom.yaml). The function usually takes 10-15 minutes to activate after solution installation/update. + +**STEP 1 - Configuration steps for the Zoom API** + + [Follow the instructions](https://developers.zoom.us/docs/internal-apps/create/) to obtain the credentials. 
+ +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Zoom Reports data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Zoom Audit data connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ZoomAPI-azuredeployV2) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ZoomAPI-azuredeployV2-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Region**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **AccountID**, **ClientID**, **ClientSecret**, **WorkspaceID**, **WorkspaceKey**, **Function Name** and click Review + create. +4. Finally click **Create** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Zoom Reports data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-ZoomAPI-functionapp) file. 
Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ZoomXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration + + **Step 2 - Configure the Function App** + + 1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select ** New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + AccountID + ClientID + ClientSecret + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) + Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/zscaler.md b/Tools/Solutions Analyzer/connector-docs/connectors/zscaler.md index 851fe572831..2dfbdb3d9b8 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/zscaler.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/zscaler.md @@ -10,4 +10,59 @@ The Zscaler data connector allows you to easily connect your Zscaler Internet Access (ZIA) logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. Using Zscaler on Microsoft Sentinel will provide you more insights into your organization’s Internet usage, and will enhance its security operation capabilities.​ +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Keys** (Workspace): read permissions to shared keys for the workspace. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. 
+ +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Set Zscaler product to send Syslog messages in CEF format to your Syslog agent. Make sure you to send the logs on port 514 TCP. + +Go to [Zscaler Microsoft Sentinel integration guide](https://aka.ms/ZscalerCEFInstructions) to learn more. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. 
You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/zscalerama.md b/Tools/Solutions Analyzer/connector-docs/connectors/zscalerama.md index eb94bcf5154..3ab1b7bb325 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/zscalerama.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/zscalerama.md @@ -10,4 +10,60 @@ The Zscaler data connector allows you to easily connect your Zscaler Internet Access (ZIA) logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. Using Zscaler on Microsoft Sentinel will provide you more insights into your organization’s Internet usage, and will enhance its security operation capabilities.​ +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Keys** (Workspace): read permissions to shared keys for the workspace. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. 
Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Set Zscaler product to send Syslog messages in CEF format to your Syslog agent. Make sure you to send the logs on port 514 TCP. + +Go to [Zscaler Microsoft Sentinel integration guide](https://aka.ms/ZscalerCEFInstructions) to learn more. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/connectors/zscalerprivateaccess.md b/Tools/Solutions Analyzer/connector-docs/connectors/zscalerprivateaccess.md index 1de00a0ca7e..e1ddf04009e 100644 --- a/Tools/Solutions Analyzer/connector-docs/connectors/zscalerprivateaccess.md +++ b/Tools/Solutions Analyzer/connector-docs/connectors/zscalerprivateaccess.md @@ -10,4 +10,70 @@ The [Zscaler Private Access (ZPA)](https://help.zscaler.com/zpa/what-zscaler-private-access) data connector provides the capability to ingest [Zscaler Private Access events](https://help.zscaler.com/zpa/log-streaming-service) into Microsoft Sentinel. Refer to [Zscaler Private Access documentation](https://help.zscaler.com/zpa) for more information. +## Permissions + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +## Setup Instructions + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-ZscalerPrivateAccess-parser) to create the Kusto Functions alias, **ZPAEvent** + +>**NOTE:** This data connector has been developed using Zscaler Private Access version: 21.67.1 + +**1. 
Install and onboard the agent for Linux or Windows** + +Install the agent on the Server where the Zscaler Private Access logs are forwarded. + +> Logs from Zscaler Private Access Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Follow the configuration steps below to get Zscaler Private Access logs into Microsoft Sentinel. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps. +Zscaler Private Access logs are delivered via Log Streaming Service (LSS). Refer to [LSS documentation](https://help.zscaler.com/zpa/about-log-streaming-service) for detailed information +1. Configure [Log Receivers](https://help.zscaler.com/zpa/configuring-log-receiver). While configuring a Log Receiver, choose **JSON** as **Log Template**. +2. Download config file [zpa.conf](https://aka.ms/sentinel-ZscalerPrivateAccess-conf) + wget -v https://aka.ms/sentinel-zscalerprivateaccess-conf -O zpa.conf +3. Login to the server where you have installed Azure Log Analytics agent. +4. 
Copy zpa.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. +5. Edit zpa.conf as follows: + + a. specify port which you have set your Zscaler Log Receivers to forward logs to (line 4) + + b. zpa.conf uses the port **22033** by default. Ensure this port is not being used by any other source on your server + + c. If you would like to change the default port for **zpa.conf** make sure that it should not get conflict with default AMA agent ports I.e.(For example CEF uses TCP port **25226** or **25224**) + + d. replace **workspace_id** with real value of your Workspace ID (lines 14,15,16,19) +5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command: + sudo /opt/microsoft/omsagent/bin/service_control restart +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + [← Back to Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions-index.md b/Tools/Solutions Analyzer/connector-docs/solutions-index.md index d97c7515472..56aa7c3bad2 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions-index.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions-index.md @@ -12,19 +12,16 @@ This reference documentation provides detailed information about data connectors ## Overview -This documentation covers **477 Microsoft Sentinel solutions**, including **369 solutions with data connectors**, providing access to **503 unique connectors** and **811 unique tables**. Additionally, **108 solutions without data connectors** are documented (these may contain analytics rules, workbooks, hunting queries, or playbooks). +This documentation covers **478 solutions**, of which **335** include data connectors, providing access to **462 unique connectors** and **740 unique tables**. 
### Quick Statistics | Metric | Count | |--------|-------| -| Total Solutions | 477 | -| Solutions with Data Connectors | 369 | -| Solutions without Data Connectors | 108 | -| Unique Connectors | 503 | -| Unique Tables | 811 | - -## How This Documentation is Organized\n\nEach solution has its own page containing: +| Total Solutions | 478 | +| Solutions with Connectors | 335 (70%) | +| Unique Connectors | 462 | +| Unique Tables | 740 | - **Solution Overview**: Publisher, support information, and categories - **Connectors**: List of all connectors in the solution @@ -272,7 +269,7 @@ Browse solutions alphabetically: | [Google Cloud Platform Cloud Monitoring](solutions/google-cloud-platform-cloud-monitoring.md) | 2022-07-01 | Microsoft Corporation | | [Google Cloud Platform Cloud Run](solutions/google-cloud-platform-cloud-run.md) | 2021-07-30 | Microsoft Corporation | | [Google Cloud Platform Compute Engine](solutions/google-cloud-platform-compute-engine.md) | 2022-07-07 | Microsoft Corporation | -| [Google Cloud Platform Firewall Logs](solutions/google-cloud-platform-firewall-logs.md) | 2024-11-03 | Microsoft Corporation | +| [Google Cloud Platform Firewall Logs](solutions/google-cloud-platform-firewall-logs.md) | | | | [Google Cloud Platform Load Balancer Logs](solutions/google-cloud-platform-load-balancer-logs.md) | 2025-02-12 | Microsoft Corporation | | [Google Cloud Platform Security Command Center](solutions/google-cloud-platform-security-command-center.md) | 2023-09-11 | Microsoft Corporation | | [Google Cloud Platform VPC Flow Logs](solutions/google-cloud-platform-vpc-flow-logs.md) | 2025-02-12 | Microsoft Corporation | @@ -507,6 +504,7 @@ Browse solutions alphabetically: | [SIGNL4](solutions/signl4.md) | 2021-12-10 | Derdack | | [SINEC Security Guard](solutions/sinec-security-guard.md) | 2024-07-15 | Siemens AG | | [SOC Handbook](solutions/soc-handbook.md) | 2022-11-30 | Community | +| [SOC Prime CCF](solutions/soc-prime-ccf.md) | 2025-09-25 | SOC Prime | | 
[SOC-Process-Framework](solutions/soc-process-framework.md) | 2022-04-08 | Microsoft Corporation | | [SailPointIdentityNow](solutions/sailpointidentitynow.md) | 2021-10-26 | SailPoint | | [SalemCyber](solutions/salemcyber.md) | 2023-07-21 | Salem Cyber | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/1password.md b/Tools/Solutions Analyzer/connector-docs/solutions/1password.md index cac33eaa888..0bca7d3f22c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/1password.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/1password.md @@ -25,6 +25,33 @@ This solution provides **2 data connector(s)**. The 1Password CCP connector allows the user to ingest 1Password Audit, Signin & ItemUsage events into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **1Password API token**: A 1Password API Token is required. See the [1Password documentation](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) on how to create an API token. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. STEP 1 - Create a 1Password API token:** + +Follow the [1Password documentation](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) for guidance on this step. + +**2. STEP 2 - Choose the correct base URL:** + +There are multiple 1Password servers which might host your events. The correct server depends on your license and region. Follow the [1Password documentation](https://developer.1password.com/docs/events-api/reference/#servers) to choose the correct server. 
Input the base URL as displayed by the documentation (including 'https://' and without a trailing '/'). + +**3. STEP 3 - Enter your 1Password Details:** + +Enter the 1Password base URL & API Token below: +- **Base Url**: Enter your Base Url +- **API Token**: (password field) +- Click 'connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `OnePasswordEventLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/42crunch-api-protection.md b/Tools/Solutions Analyzer/connector-docs/solutions/42crunch-api-protection.md index e2ef961ddc6..9c15ad297cb 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/42crunch-api-protection.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/42crunch-api-protection.md @@ -21,6 +21,54 @@ This solution provides **1 data connector(s)**. Connects the 42Crunch API protection to Azure Log Analytics via the REST API interface +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Step 1 : Read the detailed documentation** + +The installation process is documented in great detail in the GitHub repository [Microsoft Sentinel integration](https://github.com/42Crunch/azure-sentinel-integration). The user should consult this repository further to understand installation and debug of the integration. + +**2. 
Step 2: Retrieve the workspace access credentials** + +The first installation step is to retrieve both your **Workspace ID** and **Primary Key** from the Microsoft Sentinel platform. +Copy the values shown below and save them for configuration of the API log forwarder integration. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Step 3: Install the 42Crunch protection and log forwarder** + +The next step is to install the 42Crunch protection and log forwarder to protect your API. Both components are available as containers from the [42Crunch repository](https://hub.docker.com/u/42crunch). The exact installation will depend on your environment, consult the [42Crunch protection documentation](https://docs.42crunch.com/latest/content/concepts/api_firewall_deployment_architecture.htm) for full details. Two common installation scenarios are described below: +**Installation via Docker Compose** + + The solution can be installed using a [Docker compose file](https://github.com/42Crunch/azure-sentinel-integration/blob/main/sample-deployment/docker-compose.yml). + + **Installation via Helm charts** + + The solution can be installed using a [Helm chart](https://github.com/42Crunch/azure-sentinel-integration/tree/main/helm/sentinel). +**4. Step 4: Test the data ingestion** + +In order to test the data ingestion the user should deploy the sample *httpbin* application alongside the 42Crunch protection and log forwarder [described in detail here](https://github.com/42Crunch/azure-sentinel-integration/tree/main/sample-deployment). 
+**4.1 Install the sample** + + The sample application can be installed locally using a [Docker compose file](https://github.com/42Crunch/azure-sentinel-integration/blob/main/sample-deployment/docker-compose.yml) which will install the httpbin API server, the 42Crunch API protection and the Microsoft Sentinel log forwarder. Set the environment variables as required using the values copied from step 2. + + **4.2 Run the sample** + + Verify the API protection is connected to the 42Crunch platform, and then exercise the API locally on the *localhost* at port 8080 using Postman, curl, or similar. You should see a mixture of passing and failing API calls. + + **4.3 Verify the data ingestion on Log Analytics** + + After approximately 20 minutes access the Log Analytics workspace on your Microsoft Sentinel installation, and locate the *Custom Logs* section verify that an *apifirewall_log_1_CL* table exists. Use the sample queries to examine the data. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Abnormal Security API Token**: An Abnormal Security API Token is required. [See the documentation to learn more about Abnormal Security API](https://app.swaggerhub.com/apis/abnormal-security/abx/). **Note:** An Abnormal Security account is required + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to Abnormal Security's REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +**STEP 1 - Configuration steps for the Abnormal Security API** + + [Follow these instructions](https://app.swaggerhub.com/apis/abnormal-security/abx) provided by Abnormal Security to configure the REST API integration. **Note:** An Abnormal Security account is required + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Abnormal Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Abnormal Security API Authorization Token, readily available. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +This method provides an automated deployment of the Abnormal Security connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-abnormalsecurity-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Microsoft Sentinel Workspace ID**, **Microsoft Sentinel Shared Key** and **Abnormal Security REST API Key**. + - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. + 4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Abnormal Security data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-abnormalsecurity-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. AbnormalSecurityXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + SENTINEL_WORKSPACE_ID + SENTINEL_SHARED_KEY + ABNORMAL_SECURITY_REST_API_TOKEN + logAnalyticsUri (optional) +(add any other settings required by the Function App) +Set the `uri` value to: `` +>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us.` +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `ABNORMAL_CASES_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/agari.md b/Tools/Solutions Analyzer/connector-docs/solutions/agari.md index 635cb8abeeb..8f39c4c1358 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/agari.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/agari.md @@ -21,6 +21,112 @@ This solution provides **1 data connector(s)**. This connector uses a Agari REST API connection to push data into Azure Sentinel Log Analytics. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Agari Phishing Defense, Phishing Response or Brand Protection API Client ID and Secret**: Ensure you have your Client ID and Secret keys. Instructions can be found on the [Agari Developers Site](https://developers.agari.com/agari-platform/docs/quick-start). 
+- **(Optional) Microsoft Security Graph API**: The Agari Function App has the ability to share threat intelligence with Sentinel via the Security Graph API. To use this feature, you will need to enable the [Sentinel Threat Intelligence Platforms connector](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence) as well as register an application in Azure Active Directory. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Agari APIs to pull its logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**1. STEP 1 - Get your Agari API credentials** + +1. Log into any Agari product (Client ID and Secret are the same for all applications) +2. Click on your username in the upper right and select **Settings** +3. Click on the **Generate API Secret** link to generate an API client_id and client_secret (the link will read **Regenerate API Secret** if you have already generated an API client ID/secret previously) +4. Copy both the client_id and client_secret that are generated + +**2. 
STEP 2 - (Optional) Enable the Security Graph API** + +Follow the instructions found on article [Connect Azure Sentinel to your threat intelligence platform](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence#connect-azure-sentinel-to-your-threat-intelligence-platform). Once the application is created you will need to record the Tenant ID, Client ID and Client Secret. + +**3. STEP 3 - Deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Agari Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Agari API credentials from the previous step. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**4. Choose a deployment option** + +**5. Option 1: Deploy using the Azure Resource Manager (ARM) Template** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-agari-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **Agari Client ID**, **Agari Client Secret**, select `True` or `False` for the products you subscribe to, and if you wish to share IoCs with Sentinel, select `True` for **Enable Security Graph Sharing**, and enter the required IDs from the Azure Application. +> - The Function App will request data from the Agari APIs every 5 minutes, corresponding to the Function App Timer. +> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. +6. **NOTE:** Due to the use of Environment Variables to store log access times, the App requires 1 additional manual step. In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**. + +**6. Option 2: Manual Deployment of Azure Functions** + +**1. Create a Function App** + +1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**. +2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. +3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected. +4. Make other preferable configuration changes, if needed, then click **Create**. + +**2. Import Function App Code** + +1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**. +2. Click on **Code + Test** on the left pane. +3. Copy the [Function App Code](https://aka.ms/sentinel-agari-functionapp) and paste into the Function App `run.ps1` editor. +4. Click **Save**. + +**3. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. 
Add each of the following eight to twelve (8-12) application settings individually, with their respective string values (case-sensitive): + clientID + clientSecret + workspaceID + workspaceKey + enableBrandProtectionAPI + enablePhishingResponseAPI + enablePhishingDefenseAPI + resGroup + functionName + subId + enableSecurityGraphSharing + <--- Required if enableSecurityGraphSharing is set to true ---> + GraphTenantId + GraphClientId + GraphClientSecret + logAnalyticsUri (optional) +> - Enter your Agari ClientID and Secret in 'clientId' and 'clientSecret' +> - Enter 'true' or 'false' for 'enablePhishingDefense', 'enableBrandProtection', 'enablePhishingResponse' as per your product subscriptions. +> - Enter your Resource Group name in resGroup, the name of the Function (from previous step) in functionName and your Subscription ID in subId. +> - Enter 'true' or 'false' for 'enableSecurityGraphSharing'. If you are enabling the Security Graph, the 'GraphTenantId','GraphClientId', and 'GraphClientSecret' are required. +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. + +**4. Set Permissions for the App** + +1. In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, set the status to On. + +2. Next, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**. + +**5. Complete Setup.** + +1. Once all application settings have been entered, click **Save**. Note that it will take some time to have the required dependencies download, so you may see some initial failure messages. 
+ | | | |--------------------------|---| | **Tables Ingested** | `agari_apdpolicy_log_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/agilesec-analytics-connector.md b/Tools/Solutions Analyzer/connector-docs/solutions/agilesec-analytics-connector.md index 0a0dd99c635..26120c05ae3 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/agilesec-analytics-connector.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/agilesec-analytics-connector.md @@ -20,6 +20,26 @@ This solution provides **1 data connector(s)**. Use this data connector to integrate with InfoSec Crypto Analytics and get data sent directly to Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. InfoSecGlobal Crypto Analytics Data Connector** + +1. Data is sent to Microsoft Sentinel through Logstash + 2. Required Logstash configuration is included with Crypto Analytics installation + 3. 
Documentation provided with the Crypto Analytics installation explains how to enable sending data to Microsoft Sentinel +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `InfoSecAnalytics_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ai-analyst-darktrace.md b/Tools/Solutions Analyzer/connector-docs/solutions/ai-analyst-darktrace.md index c8a00651296..e6d7b9595be 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ai-analyst-darktrace.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ai-analyst-darktrace.md @@ -25,6 +25,72 @@ This solution provides **2 data connector(s)**. The Darktrace connector lets users connect Darktrace Model Breaches in real-time with Microsoft Sentinel, allowing creation of custom Dashboards, Workbooks, Notebooks and Custom Alerts to improve investigation. Microsoft Sentinel's enhanced visibility into Darktrace logs enables monitoring and mitigation of security threats. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Configure Darktrace to forward Syslog messages in CEF format to your Azure workspace via the Syslog agent. + + 1) Within the Darktrace Threat Visualizer, navigate to the System Config page in the main menu under Admin. + + 2) From the left-hand menu, select Modules and choose Microsoft Sentinel from the available Workflow Integrations. + + 3) A configuration window will open. Locate Microsoft Sentinel Syslog CEF and click New to reveal the configuration settings, unless already exposed. + + 4) In the Server configuration field, enter the location of the log forwarder and optionally modify the communication port. Ensure that the port selected is set to 514 and is allowed by any intermediary firewalls. + + 5) Configure any alert thresholds, time offsets or additional settings as required. + + 6) Review any additional configuration options you may wish to enable that alter the Syslog syntax. + + 7) Enable Send Alerts and save your changes. + + **Step C. 
Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/aishield-ai-security-monitoring.md b/Tools/Solutions Analyzer/connector-docs/solutions/aishield-ai-security-monitoring.md index f8d6e4976b9..2b0c2326225 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/aishield-ai-security-monitoring.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/aishield-ai-security-monitoring.md @@ -22,6 +22,27 @@ This solution provides **1 data connector(s)**. [AIShield](https://www.boschaishield.com/) connector allows users to connect with AIShield custom defense mechanism logs with Microsoft Sentinel, allowing the creation of dynamic Dashboards, Workbooks, Notebooks and tailored Alerts to improve investigation and thwart attacks on AI systems. 
It gives users more insight into their organization's AI assets security posturing and improves their AI systems security operation capabilities.AIShield.GuArdIan analyzes the LLM generated content to identify and mitigate harmful content, safeguarding against legal, policy, role based, and usage based violations +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Note**: Users should have utilized AIShield SaaS offering to conduct vulnerability analysis and deployed custom defense mechanisms generated along with their AI asset. [**Click here**](https://azuremarketplace.microsoft.com/marketplace/apps/rbei.bgsw_aishield_product) to know more or get in touch. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**AIShield**](https://aka.ms/sentinel-boschaishield-parser) which is deployed with the Microsoft Sentinel Solution. + +>**IMPORTANT:** Before deploying the AIShield Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `AIShield_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/akamai-security-events.md b/Tools/Solutions Analyzer/connector-docs/solutions/akamai-security-events.md index 9c063e400a5..06e32853cee 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/akamai-security-events.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/akamai-security-events.md @@ -25,6 +25,61 @@ This solution provides **2 data connector(s)**. Akamai Solution for Microsoft Sentinel provides the capability to ingest [Akamai Security Events](https://www.akamai.com/us/en/products/security/) into Microsoft Sentinel. Refer to [Akamai SIEM Integration documentation](https://developer.akamai.com/tools/integrations/siem) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Akamai Security Events and load the function code or click [here](https://aka.ms/sentinel-akamaisecurityevents-parser), on the second line of the query, enter the hostname(s) of your Akamai Security Events device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + [Follow these steps](https://developer.akamai.com/tools/integrations/siem) to configure Akamai CEF connector to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace.
+ +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/alc-webctrl.md b/Tools/Solutions Analyzer/connector-docs/solutions/alc-webctrl.md index 789950c884f..b6cbe52ea6d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/alc-webctrl.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/alc-webctrl.md @@ -21,6 +21,51 @@ This solution provides **1 data connector(s)**. You can stream the audit logs from the WebCTRL SQL server hosted on Windows machines connected to your Microsoft Sentinel. This connection enables you to view dashboards, create custom alerts and improve investigation. This gives insights into your Industrial Control Systems that are monitored or controlled by the WebCTRL BAS application. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Install and onboard the Microsoft agent for Windows.** + +Learn about [agent setup](https://docs.microsoft.com/services-hub/health/mma-setup) and [windows events onboarding](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-windows-events). + + You can skip this step if you have already installed the Microsoft agent for Windows + +**2. Configure Windows task to read the audit data and write it to windows events** + +Install and configure the Windows Scheduled Task to read the audit logs in SQL and write them as Windows Events. These Windows Events will be collected by the agent and forward to Microsoft Sentinel. + +> Notice that the data from all machines will be stored in the selected workspace +2.1 Copy the [setup files](https://aka.ms/sentinel-automatedlogicwebctrl-tasksetup) to a location on the server. + + 2.2 Update the [ALC-WebCTRL-AuditPull.ps1](https://aka.ms/sentinel-automatedlogicwebctrl-auditpull) (copied in above step) script parameters like the target database name and windows event id's. Refer comments in the script for more details. + + 2.3 Update the windows task settings in the [ALC-WebCTRL-AuditPullTaskConfig.xml](https://aka.ms/sentinel-automatedlogicwebctrl-auditpulltaskconfig) file that was copied in above step as per requirement. Refer comments in the file for more details. + + 2.4 Install windows tasks using the updated configs copied in the above steps + - **Run the following command in powershell from the directory where the setup files are copied in step 2.1**: `schtasks.exe /create /XML "ALC-WebCTRL-AuditPullTaskConfig.xml" /tn "ALC-WebCTRL-AuditPull"` +**3. 
Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the Event schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, validate below steps for any run time issues: + +> 1. Make sure that the scheduled task is created and is in running state in the Windows Task Scheduler. + +>2. Check for task execution errors in the history tab in Windows Task Scheduler for the newly created task in step 2.4 + +>3. Make sure that the SQL Audit table consists new records while the scheduled windows task runs. + | | | |--------------------------|---| | **Tables Ingested** | `Event` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/alibaba-cloud-actiontrail.md b/Tools/Solutions Analyzer/connector-docs/solutions/alibaba-cloud-actiontrail.md index 7fbbb1f9336..910910d5106 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/alibaba-cloud-actiontrail.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/alibaba-cloud-actiontrail.md @@ -21,6 +21,56 @@ This solution provides **1 data connector(s)**. The [Alibaba Cloud ActionTrail](https://www.alibabacloud.com/product/actiontrail) data connector provides the capability to retrieve actiontrail events stored into [Alibaba Cloud Simple Log Service](https://www.alibabacloud.com/product/log-service) and store them into Microsoft Sentinel through the [SLS REST API](https://www.alibabacloud.com/help/sls/developer-reference/api-sls-2020-12-30-getlogs). The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+ +**Custom Permissions:** +- **SLS REST API Credentials/permissions**: **AliCloudAccessKeyId** and **AliCloudAccessKeySecret** are required for making API calls. RAM policy statement with action of at least `log:GetLogStoreLogs` over resource `acs:log:{#regionId}:{#accountId}:project/{#ProjectName}/logstore/{#LogstoreName}` is needed to grant a RAM user the permissions to call this operation. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure access to AliCloud SLS API** + +Before using the API, you need to prepare your identity account and access key pair to effectively access the API. +1. We recommend that you use a Resource Access Management (RAM) user to call API operations. For more information, see [create a RAM user and authorize the RAM user to access Simple Log Service](https://www.alibabacloud.com/help/sls/create-a-ram-user-and-authorize-the-ram-user-to-access-log-service). +2. Obtain the access key pair for the RAM user. For details see [get Access Key pair](https://www.alibabacloud.com/help/ram/user-guide/create-an-accesskey-pair). + +Note the access key pair details for the next step. + +**2. Add ActionTrail Logstore** + +To enable the Alibaba Cloud ActionTrail connector for Microsoft Sentinel, click upon add ActionTrail Logstore, fill the form with the Alibaba Cloud environment configuration and click Connect. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors.
+ +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **AliCloud SLS Logstore Endpoint URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add ActionTrail Logstore** + +*Add SLS Logstore linked to Alibaba Cloud ActionTrail* + +When you click the "Add Logstore" button in the portal, a configuration form will open. You'll need to provide: + +- **Alibaba Cloud SLS Public Endpoint** (optional): .log.aliyuncs.com +- **Project** (optional): +- **Logstore** (optional): +- **Access Key ID** (optional): Access Key ID +- **Access Key Secret** (optional): Access Key Secret + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `AliCloudActionTrailLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/alsid-for-ad.md b/Tools/Solutions Analyzer/connector-docs/solutions/alsid-for-ad.md index 703d4d2b6a9..200e91b4415 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/alsid-for-ad.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/alsid-for-ad.md @@ -23,6 +23,63 @@ Alsid for Active Directory connector allows to export Alsid Indicators of Exposu It provides a data parser to manipulate the logs more easily. The different workbooks ease your Active Directory monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-alsidforad-parser) to create the Kusto Functions alias, **afad_parser** + +**1. Configure the Syslog server** + +You will first need a **linux Syslog** server that Alsid for AD will send logs to. Typically you can run **rsyslog** on **Ubuntu**. + You can then configure this server as you wish, but it is recommended to be able to output AFAD logs in a separate file. +Alternatively you can use [this Quickstart template](https://azure.microsoft.com/resources/templates/alsid-syslog-proxy/) which will deploy the Syslog server and the Microsoft agent for you. If you do use this template, you can skip step 3. + +**2. Configure Alsid to send logs to your Syslog server** + +On your **Alsid for AD** portal, go to *System*, *Configuration* and then *Syslog*. +From there you can create a new Syslog alert toward your Syslog server. + +Once this is done, check that the logs are correctly gathered on your server in a separate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in AFAD). +If you used the Quickstart template, the Syslog server will by default listen on port 514 in UDP and 1514 in TCP, without TLS. + +**3.
Install and onboard the Microsoft agent for Linux** + +You can skip this step if you used the Quickstart template in step 1 +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**4. Configure the logs to be collected by the agents** + +Configure the agent to collect the logs. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Custom Logs**. +2. Select **Apply below configuration to my machines** and click **Add**. +3. Upload a sample AFAD Syslog file from the **Linux** machine running the **Syslog** server and click **Next**, for your convenience, you can find such a file [here](https://github.com/Azure/azure-quickstart-templates/blob/master/alsid-syslog-proxy/logs/AlsidForAD.log). +4. Set the record delimiter to **New Line** if not already the case and click **Next**. +5. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**. If you used the Quickstart template in step 1, the default location of the file is `/var/log/AlsidForAD.log`. +6. Set the **Name** to *AlsidForADLog_CL* then click **Done** (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *AlsidForADLog_CL_CL*). + +All of these steps are showcased [here](https://www.youtube.com/watch?v=JwV1uZSyXM4&feature=youtu.be) as an example +- **Open Syslog settings** + +> You should now be able to receive logs in the *AlsidForADLog_CL* table, logs data can be parse using the **afad_parser()** function, used by all query samples, workbooks and analytic templates. 
+ | | | |--------------------------|---| | **Tables Ingested** | `AlsidForADLog_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/amazon-web-services-networkfirewall.md b/Tools/Solutions Analyzer/connector-docs/solutions/amazon-web-services-networkfirewall.md index ccdd088a59c..70f5f0a9586 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/amazon-web-services-networkfirewall.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/amazon-web-services-networkfirewall.md @@ -21,6 +21,69 @@ This solution provides **1 data connector(s)**. This data connector allows you to ingest AWS Network Firewall logs into Microsoft Sentinel for advanced threat detection and security monitoring. By leveraging Amazon S3 and Amazon SQS, the connector forwards network traffic logs, intrusion detection alerts, and firewall events to Microsoft Sentinel, enabling real-time analysis and correlation with other security data +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Ingesting AWS NetworkFirewall logs in Microsoft Sentinel** + +### List of Resources Required: + +* Open ID Connect (OIDC) web identity provider +* IAM Role +* Amazon S3 Bucket +* Amazon SQS +* AWSNetworkFirewall configuration +* Follow this instructions for [AWS NetworkFirewall Data connector](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/readme.md) configuration +#### 1. AWS CloudFormation Deployment + To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace. 
+ #### For each template, create Stack in AWS: + 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). + 2. Choose the ‘**Specify template**’ option, then ‘**Upload a template file**’ by clicking on ‘**Choose file**’ and selecting the appropriate CloudFormation template file provided below. click ‘**Choose file**’ and select the downloaded template. + 3. Click '**Next**' and '**Create stack**'. +- **Template 1: OpenID connect authentication deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Template 2: AWSNetworkFirewall resources deployment**: `AWSNetworkFirewall` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Connect new collectors + To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Role ARN** +- **Queue URL** +- **Stream name** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add new controller** + +*AWS S3 connector* + +When you click the "Add new collector" button in the portal, a configuration form will open. 
You'll need to provide: + +*Account details* + +- **Role ARN** (required) +- **Queue URL** (required) +- **Data type** (required): Select from available options + - Alert Log + - Flow Log + - Tls Log + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `AWSNetworkFirewallAlert` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/amazon-web-services-route-53.md b/Tools/Solutions Analyzer/connector-docs/solutions/amazon-web-services-route-53.md index 9405f0be328..52fb7b1b43d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/amazon-web-services-route-53.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/amazon-web-services-route-53.md @@ -21,6 +21,110 @@ This solution provides **1 data connector(s)**. This connector enables ingestion of AWS Route 53 DNS logs into Microsoft Sentinel for enhanced visibility and threat detection. It supports DNS Resolver query logs ingested directly from AWS S3 buckets, while Public DNS query logs and Route 53 audit logs can be ingested using Microsoft Sentinel's AWS CloudWatch and CloudTrail connectors. Comprehensive instructions are provided to guide you through the setup of each log type. Leverage this connector to monitor DNS activity, detect potential threats, and improve your security posture in cloud environments. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. AWS Route53** + +This connector enables the ingestion of AWS Route 53 DNS logs into Microsoft Sentinel, providing enhanced visibility into DNS activity and strengthening threat detection capabilities. It supports direct ingestion of DNS Resolver query logs from AWS S3 buckets, while Public DNS query logs and Route 53 audit logs can be ingested via Microsoft Sentinel’s AWS CloudWatch and CloudTrail connectors. Detailed setup instructions are provided for each log type. Use this connector to monitor DNS traffic, identify potential threats, and enhance your cloud security posture. + +You can ingest the following type of logs from AWS Route 53 to Microsoft Sentinel: +1. Route 53 Resolver query logs +2. Route 53 Public Hosted zones query logs (via Microsoft Sentinel CloudWatch connector) +3. Route 53 audit logs (via Microsoft Sentinel CloudTrail connector) + +**Ingesting Route53 Resolver query logs in Microsoft Sentinel** + + ### List of Resources Required: + +* Open ID Connect (OIDC) web identity provider +* IAM Role +* Amazon S3 Bucket +* Amazon SQS +* Route 53 Resolver query logging configuration +* VPC to associate with Route53 Resolver query log config + #### 1. AWS CloudFormation Deployment + To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace. + #### For each template, create Stack in AWS: + 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). + 2. Choose the ‘**Specify template**’ option, then ‘**Upload a template file**’ by clicking on ‘**Choose file**’ and selecting the appropriate CloudFormation template file provided below. click ‘**Choose file**’ and select the downloaded template. + 3. Click '**Next**' and '**Create stack**'. 
+ - **Template 1: OpenID connect authentication deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + - **Template 2: AWS Route53 resources deployment**: `AWSRoute53Resolver` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + ### 2. Connect new collectors + To enable Amazon Web Services S3 DNS Route53 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. + **Connector Management Interface** + + This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + + 📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: + - **Role ARN** + - **Queue URL** + + ➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + + 🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + + > 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + + **Add new controller** + + *AWS Security Hub connector* + + When you click the "Add new collector" button in the portal, a configuration form will open. You'll need to provide: + + *Account details* + + - **Role ARN** (required) + - **Queue URL** (required) + + > 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + + + **Ingesting Route 53 Public Hosted zones query logs (via Microsoft Sentinel CloudWatch connector)** + + Public Hosted zone query logs are exported to CloudWatch service in AWS. 
We can use 'Amazon Web Services S3' connector to ingest CloudWatch logs from AWS to Microsoft Sentinel. +**Step 1: Configure logging for Public DNS queries** + + 1. Sign in to the AWS Management Console and open the Route 53 console at [AWS Route 53](https://console.aws.amazon.com/route53/). +2. Navigate to Route 53 > Hosted zones. +3. Choose the Public hosted zone that you want to configure query logging for. +4. In the Hosted zone details pane, click "Configure query logging". +5. Choose an existing log group or create a new log group. +6. Choose Create. + + **Step 2: Configure Amazon Web Services S3 data connector for AWS CloudWatch** + + AWS CloudWatch logs can be exported to an S3 bucket using lambda function. To ingest Public DNS queries from `AWS CloudWatch` to `S3` bucket and then to Microsoft Sentinel, follow the instructions provided in the [Amazon Web Services S3 connector](https://learn.microsoft.com/en-us/azure/sentinel/connect-aws?tabs=s3). + + **Ingesting Route 53 audit logs (via Microsoft Sentinel CloudTrail connector)** + + Route 53 audit logs i.e. the logs related to actions taken by user, role or AWS service in Route 53 can be exported to an S3 bucket via AWS CloudTrail service. We can use 'Amazon Web Services S3' connector to ingest CloudTrail logs from AWS to Microsoft Sentinel. +**Step 1: Configure logging for AWS Route 53 Audit logs** + + 1. Sign in to the AWS Management Console and open the CloudTrail console at [AWS CloudTrail](https://console.aws.amazon.com/cloudtrail) +2. If you do not have an existing trail, click on 'Create trail' +3. Enter a name for your trail in the Trail name field. +4. Select Create new S3 bucket (you may also choose to use an existing S3 bucket). +5. Leave the other settings as default, and click Next. +6. Select Event type, make sure Management events is selected. +7. Select API activity, 'Read' and 'Write' +8. Click Next. +9. Review the settings and click 'Create trail'. 
+ + **Step 2: Configure Amazon Web Services S3 data connector for AWS CloudTrail** + + To ingest audit and management logs from `AWS CloudTrail` to Microsoft Sentinel, follow the instructions provided in the [Amazon Web Services S3 connector](https://learn.microsoft.com/en-us/azure/sentinel/connect-aws?tabs=s3) + | | | |--------------------------|---| | **Tables Ingested** | `AWSRoute53Resolver` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/amazon-web-services.md b/Tools/Solutions Analyzer/connector-docs/solutions/amazon-web-services.md index 09b94b009de..1c2924ff349 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/amazon-web-services.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/amazon-web-services.md @@ -29,6 +29,55 @@ This solution provides **3 data connector(s)**. This connector allows you to ingest AWS WAF logs, collected in AWS S3 buckets, to Microsoft Sentinel. AWS WAF logs are detailed records of traffic that web access control lists (ACLs) analyze, which are essential for maintaining the security and performance of web applications. These logs contain information such as the time AWS WAF received the request, the specifics of the request, and the action taken by the rule that the request matched. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. 
AWS CloudFormation Deployment + To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace. + #### For each template, create Stack in AWS: + 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). + 2. Choose the ‘Specify template’ option, then ‘Upload a template file’ by clicking on ‘Choose file’ and selecting the appropriate CloudFormation template file provided below. click ‘Choose file’ and select the downloaded template. + 3. Click 'Next' and 'Create stack'. +- **Template 1: OpenID connect authentication deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Template 2: AWS WAF resources deployment**: `AwsWAF` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Connect new collectors + To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Role ARN** +- **Queue URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. 
+ +**Add new collector** + +*AWS S3 connector* + +When you click the "Add new collector" button in the portal, a configuration form will open. You'll need to provide: + +*Account details* + +- **Role ARN** (required) +- **Queue URL** (required) + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `AWSWAF` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/anvilogic.md b/Tools/Solutions Analyzer/connector-docs/solutions/anvilogic.md index a947ffa284c..75578d06c69 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/anvilogic.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/anvilogic.md @@ -21,6 +21,29 @@ This solution provides **1 data connector(s)**. The Anvilogic data connector allows you to pull events of interest generated in the Anvilogic ADX cluster into your Microsoft Sentinel +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Anvilogic Application Registration Client ID and Client Secret**: To access the Anvilogic ADX we require the client id and client secret from the Anvilogic app registration + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Connect to Anvilogic to start collecting events of interest in Microsoft Sentinel** + +Complete the form to ingest Anvilogic Alerts into your Microsoft Sentinel +- **Token Endpoint**: https://login[.]microsoftonline[.]com//oauth2/v2.0/token +- **Anvilogic ADX Scope**: /.default +- **Anvilogic ADX Request URI**: /v2/rest/query +- **OAuth Configuration**: + - Client ID + - Client Secret + - Click 'Connect' to authenticate + | | | |--------------------------|---| | **Tables Ingested** | `Anvilogic_Alerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/apachehttpserver.md b/Tools/Solutions Analyzer/connector-docs/solutions/apachehttpserver.md index ed84ef449dd..bf1c6f27cf8 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/apachehttpserver.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/apachehttpserver.md @@ -21,6 +21,62 @@ This solution provides **1 data connector(s)**. The Apache HTTP Server data connector provides the capability to ingest [Apache HTTP Server](http://httpd.apache.org/) events into Microsoft Sentinel. Refer to [Apache Logs documentation](https://httpd.apache.org/docs/2.4/logs.html) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ApacheHTTPServer and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ApacheHTTPServer/Parsers/ApacheHTTPServer.txt). The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Apache HTTP Server where the logs are generated. + +> Logs from Apache HTTP Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. From the left pane, select **Data**, select **Custom Logs** and click **Add+** +3. Click **Browse** to upload a sample of a Apache HTTP Server log file (e.g. access.log or error.log). Then, click **Next >** +4. Select **New line** as the record delimiter and click **Next >** +5. 
Select **Windows** or **Linux** and enter the path to Apache HTTP logs based on your configuration. Example: + - **Windows** directory: `C:\Server\bin\Apache24\logs\*.log` + - **Linux** Directory: `/var/log/httpd/*.log` +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **ApacheHTTPServer_CL** as the custom log Name and click **Done** + | | | |--------------------------|---| | **Tables Ingested** | `ApacheHTTPServer_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/archtis.md b/Tools/Solutions Analyzer/connector-docs/solutions/archtis.md index 1fc74ca80e5..33ae17097de 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/archtis.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/archtis.md @@ -21,6 +21,30 @@ This solution provides **1 data connector(s)**. [NC Protect Data Connector (archtis.com)](https://info.archtis.com/get-started-with-nc-protect-sentinel-data-connector) provides the capability to ingest user activity logs and events into Microsoft Sentinel. The connector provides visibility into NC Protect user activity logs and events in Microsoft Sentinel to improve monitoring and investigation capabilities +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **NC Protect**: You must have a running instance of NC Protect for O365. Please [contact us](https://www.archtis.com/data-discovery-classification-protection-software-secure-collaboration/). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +1. Install NC Protect into your Azure Tenancy +2. Log into the NC Protect Administration site +3. From the left hand navigation menu, select General -> User Activity Monitoring +4. Tick the checkbox to Enable SIEM and click the Configure button +5. Select Microsoft Sentinel as the Application and complete the configuration using the information below +6. Click Save to activate the connection +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `NCProtectUAL_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/argoscloudsecurity.md b/Tools/Solutions Analyzer/connector-docs/solutions/argoscloudsecurity.md index 0ec50792dc6..936b5fada7a 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/argoscloudsecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/argoscloudsecurity.md @@ -21,6 +21,38 @@ This solution provides **1 data connector(s)**. The ARGOS Cloud Security integration for Microsoft Sentinel allows you to have all your important cloud security events in one place. This enables you to easily create dashboards, alerts, and correlate events across multiple systems. Overall this will improve your organization's security posture and security incident response. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Subscribe to ARGOS** + +Ensure you already own an ARGOS Subscription. If not, browse to [ARGOS Cloud Security](https://argos-security.io) and sign up to ARGOS. + +Alternatively, you can also purchase ARGOS via the [Azure Marketplace](https://azuremarketplace.microsoft.com/en-au/marketplace/apps/argoscloudsecurity1605618416175.argoscloudsecurity?tab=Overview). + +**2. Configure Sentinel integration from ARGOS** + +Configure ARGOS to forward any new detections to your Sentinel workspace by providing ARGOS with your Workspace ID and Primary Key. + +There is **no need to deploy any custom infrastructure**. + +Enter the information into the [ARGOS Sentinel](https://app.argos-security.io/account/sentinel) configuration page. + +New detections will automatically be forwarded. + +[Learn more about the integration](https://www.argos-security.io/resources#integrations) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `ARGOS_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/aristaawakesecurity.md b/Tools/Solutions Analyzer/connector-docs/solutions/aristaawakesecurity.md index c9a4c3bc2f8..51a95732bd7 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/aristaawakesecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/aristaawakesecurity.md @@ -21,6 +21,72 @@ This solution provides **1 data connector(s)**. 
The Awake Security CEF connector allows users to send detection model matches from the Awake Security Platform to Microsoft Sentinel. Remediate threats quickly with the power of network detection and response and speed up investigations with deep visibility especially into unmanaged entities including users, devices and applications on your network. The connector also enables the creation of network security-focused custom alerts, incidents, workbooks and notebooks that align with your existing security operations workflows. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. 
Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Awake Adversarial Model match results to a CEF collector.** + +Perform the following steps to forward Awake Adversarial Model match results to a CEF collector listening on TCP port **514** at IP **192.168.0.1**: +- Navigate to the Detection Management Skills page in the Awake UI. +- Click + Add New Skill. +- Set the Expression field to, +>integrations.cef.tcp { destination: "192.168.0.1", port: 514, secure: false, severity: Warning } +- Set the Title field to a descriptive name like, +>Forward Awake Adversarial Model match result to Microsoft Sentinel. +- Set the Reference Identifier to something easily discoverable like, +>integrations.cef.sentinel-forwarder +- Click Save. + +Note: Within a few minutes of saving the definition and other fields the system will begin sending new model match results to the CEF events collector as they are detected. + +For more information, refer to the **Adding a Security Information and Event Management Push Integration** page from the Help Documentation in the Awake UI. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. 
You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/armis.md b/Tools/Solutions Analyzer/connector-docs/solutions/armis.md index 6b0b2e1a119..7fd31b4a82b 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/armis.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/armis.md @@ -34,6 +34,170 @@ This solution provides **4 data connector(s)**. The [Armis](https://www.armis.com/) Device connector gives the capability to ingest Armis Devices into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get device information from the Armis platform. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. Armis can also integrate with your existing IT & security management tools to identify and classify each and every device, managed or unmanaged in your environment. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Armis Secret Key** is required. See the documentation to learn more about API on the `https://.armis.com/api/v1/doc` + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-ArmisDevice-parser) to create the Kusto functions alias, **ArmisDevice** + +**STEP 1 - Configuration steps for the Armis API** + + Follow these instructions to create an Armis API secret key. + 1. Log into your Armis instance + 2. Navigate to Settings -> API Management + 3. If the secret key has not already been created, press the Create button to create the secret key + 4. To access the secret key, press the Show button + 5. 
The secret key can now be copied and used during the Armis Device connector configuration + +**STEP 2 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Armis Device Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 3 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of Armis Device Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Armis Device Data Connector. 
+ +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 4 - Assign role of Contributor to application in Microsoft Entra ID** + + Follow the steps in this section to assign the role: + 1. In the Azure portal, Go to **Resource Group** and select your resource group. + 2. Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. Select **Contributor** as role and click on next. + 5. In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 5 - Create a Keyvault** + + Follow these instructions to create a new Keyvault. + 1. In the Azure portal, Go to **Key vaults**. Click create. + 2. Select Subsciption, Resource Group and provide unique name of keyvault. + +> **NOTE:** Create a separate key vault for each **API key** within one workspace. + +**STEP 6 - Create Access Policy in Keyvault** + + Follow these instructions to create access policy in Keyvault. + 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel. Click create. + 2. Select all keys & secrets permissions. Click next. + 3. In the principal section, search by application name which was generated in STEP - 2. Click next. 
+ +> **NOTE:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'** + +**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Armis Device data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Armis API Authorization Key(s) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**8. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Armis connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisDevice-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisDevice-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Function Name + Workspace ID + Workspace Key + Armis Secret Key + Armis URL (https://.armis.com/api/v1/) + Armis Device Table Name + Armis Schedule + KeyVault Name + Azure Client Id + Azure Client Secret + Tenant Id +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**9. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Armis Device data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. 
Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisDevice311-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX). + + e. **Select a runtime:** Choose Python 3.11 + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. 
Add each of the following application settings individually, with their respective values (case-sensitive): + Workspace ID + Workspace Key + Armis Secret Key + Armis URL (https://.armis.com/api/v1/) + Armis Device Table Name + Armis Schedule + KeyVault Name + Azure Client Id + Azure Client Secret + Tenant Id + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `Armis_Devices_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/armorblox.md b/Tools/Solutions Analyzer/connector-docs/solutions/armorblox.md index 08896764af5..cf2addaf453 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/armorblox.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/armorblox.md @@ -21,6 +21,102 @@ This solution provides **1 data connector(s)**. The [Armorblox](https://www.armorblox.com/) data connector provides the capability to ingest incidents from your Armorblox instance into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, and more. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Armorblox Instance Details**: **ArmorbloxInstanceName** OR **ArmorbloxInstanceURL** is required +- **Armorblox API Credentials**: **ArmorbloxAPIToken** is required + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Armorblox API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Armorblox API** + + Follow the instructions to obtain the API token. + +1. Log in to the Armorblox portal with your credentials. +2. In the portal, click **Settings**. +3. In the **Settings** view, click **API Keys** +4. Click **Create API Key**. +5. Enter the required information. +6. Click **Create**, and copy the API token displayed in the modal. +7. Save API token for using in the data connector. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Armorblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Armorblox data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-armorblox-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **ArmorbloxAPIToken**, **ArmorbloxInstanceURL** OR **ArmorbloxInstanceName**, and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Armorblox data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-armorblox-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. 
+If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. Armorblox). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select ** New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + ArmorbloxAPIToken + ArmorbloxInstanceName OR ArmorbloxInstanceURL + WorkspaceID + WorkspaceKey + LogAnalyticsUri (optional) +> - Use LogAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. 
+ | | | |--------------------------|---| | **Tables Ingested** | `Armorblox_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/aruba-clearpass.md b/Tools/Solutions Analyzer/connector-docs/solutions/aruba-clearpass.md index 4ee3962432f..cff9da5193f 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/aruba-clearpass.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/aruba-clearpass.md @@ -25,6 +25,63 @@ This solution provides **2 data connector(s)**. The [Aruba ClearPass](https://www.arubanetworks.com/products/security/network-access-control/secure-access/) connector allows you to easily connect your Aruba ClearPass with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArubaClearPass and load the function code or click [here](https://aka.ms/sentinel-arubaclearpass-parser).The function usually takes 10-15 minutes to activate after solution installation/update. +**1. 
Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade. + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check if there is an existing DCR configured to collect the required facility of logs; if not, create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Aruba ClearPass logs to a Syslog agent** + + Configure Aruba ClearPass to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. +1. [Follow these instructions](https://www.arubanetworks.com/techdocs/ClearPass/6.7/PolicyManager/Content/CPPM_UserGuide/Admin/syslogExportFilters_add_syslog_filter_general.htm) to configure the Aruba ClearPass to forward syslog. +2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. 
You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/atlassianconfluenceaudit.md b/Tools/Solutions Analyzer/connector-docs/solutions/atlassianconfluenceaudit.md index d29c7c434fc..9832f3c44a0 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/atlassianconfluenceaudit.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/atlassianconfluenceaudit.md @@ -29,6 +29,45 @@ This solution provides **3 data connector(s)**. The [Atlassian Confluence](https://www.atlassian.com/software/confluence) Audit data connector provides the capability to ingest [Confluence Audit Records](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Atlassian Confluence API access**: Permission of [Administer Confluence](https://developer.atlassian.com/cloud/confluence/rest/v1/intro/#auth) is required to get access to the Confluence Audit logs API. 
See [Confluence API documentation](https://developer.atlassian.com/cloud/confluence/rest/v1/api-group-audit/#api-wiki-rest-api-audit-get) to learn more about the audit API. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +To enable the Atlassian Confluence connector for Microsoft Sentinel, click to add an organization, fill the form with the Confluence environment credentials and click to Connect. + Follow [these steps](https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/) to create an API token. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Atlassian Confluence organization URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add organization** + +*Add Atlassian Confluence organization* + +When you click the "Add organization" button in the portal, a configuration form will open. 
You'll need to provide: + +- **Atlassian Confluence organization URL** (optional): .atlassian.net +- **User Name** (optional): User Name (e.g., user@example.com) +- **API Token** (optional): API Token + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `ConfluenceAuditLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/atlassianjiraaudit.md b/Tools/Solutions Analyzer/connector-docs/solutions/atlassianjiraaudit.md index 0cdfd90f87d..b5a3ad4e6ab 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/atlassianjiraaudit.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/atlassianjiraaudit.md @@ -25,6 +25,45 @@ This solution provides **2 data connector(s)**. The [Atlassian Jira](https://www.atlassian.com/software/jira) Audit data connector provides the capability to ingest [Jira Audit Records](https://support.atlassian.com/jira-cloud-administration/docs/audit-activities-in-jira-applications/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Atlassian Jira API access**: Permission of [Administer Jira](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) is required to get access to the Jira Audit logs API. See [Jira API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/#api-group-audit-records) to learn more about the audit API. 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +To enable the Atlassian Jira connector for Microsoft Sentinel, click to add an organization, fill the form with the Jira environment credentials and click to Connect. + Follow [these steps](https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/) to create an API token. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Atlassian Jira organization URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add organization** + +*Add Atlassian Jira organization* + +When you click the "Add organization" button in the portal, a configuration form will open. You'll need to provide: + +- **Atlassian Jira organization URL** (optional): Atlassian Jira organization URL +- **User Name** (optional): User Name (e.g., user@example.com) +- **API Key** (optional): API Key + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. 
+ | | | |--------------------------|---| | **Tables Ingested** | `Jira_Audit_v2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/auth0.md b/Tools/Solutions Analyzer/connector-docs/solutions/auth0.md index da35c2b18b4..ec42b0b0f72 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/auth0.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/auth0.md @@ -25,6 +25,25 @@ This solution provides **2 data connector(s)**. The [Auth0](https://auth0.com/docs/api/management/v2/logs/get-logs) data connector allows ingesting logs from Auth0 API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses Auth0 API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### STEP 1 - Configuration steps for the Auth0 Management API +Follow the instructions to obtain the credentials. + 1. In Auth0 Dashboard, go to [**Applications > Applications**] + 2. Select your Application. This should be a [**Machine-to-Machine**] Application configured with at least [**read:logs**] and [**read:logs_users**] permissions. + 3. 
Copy [**Domain, ClientID, Client Secret**] +- **Base API URL**: https://example.auth0.com +- **Client ID**: Client ID +- **Client Secret**: (password field) +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `Auth0Logs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/authomize.md b/Tools/Solutions Analyzer/connector-docs/solutions/authomize.md index 68474e36eaa..7a8fa8e0f08 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/authomize.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/authomize.md @@ -21,6 +21,31 @@ This solution provides **1 data connector(s)**. The Authomize Data Connector provides the capability to ingest custom log types from Authomize into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Include custom pre-requisites if the connectivity requires - else delete customs**: Description for any custom pre-requisite + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Locate your Authomize API key** + +Follow the setup instructions [located under Data Connectors for Authomize](https://github.com/authomize/Open-ITDR/blob/main/Open-Connectors/Platform/Azure-Sentinel/Data%20Connectors/readme.md). + +**2. 
Deploy the Authomize data connector using the setup instructions.** + +Follow the Instructions on [deploying the data connector to ingest data from Authomize](https://github.com/authomize/Open-ITDR/blob/main/Open-Connectors/Platform/Azure-Sentinel/Data%20Connectors/readme.md). + +**3. Finalize your setup** + +Validate that your script is running. Simple instructions are located under the [Authomize Data Connector area](https://github.com/authomize/Open-ITDR/blob/main/Open-Connectors/Platform/Azure-Sentinel/Data%20Connectors/readme.md). + | | | |--------------------------|---| | **Tables Ingested** | `Authomize_v2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/aws-accesslogs.md b/Tools/Solutions Analyzer/connector-docs/solutions/aws-accesslogs.md index 4ad1d2531e1..d0f5bb657ce 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/aws-accesslogs.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/aws-accesslogs.md @@ -21,6 +21,62 @@ This solution provides **1 data connector(s)**. This connector allows you to ingest AWS S3 Server Access Logs into Microsoft Sentinel. These logs contain detailed records for requests made to S3 buckets, including the type of request, resource accessed, requester information, and response details. These logs are useful for analyzing access patterns, debugging issues, and ensuring security compliance. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Environment**: You must have the following AWS resources defined and configured: S3 Bucket, Simple Queue Service (SQS), IAM roles and permissions policies. 
 + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +### 1. AWS CloudFormation Deployment + To configure access on AWS, two templates have been generated to set up the AWS environment to send AWS S3 Server Access logs to your Log Analytics Workspace. + +#### Deploy CloudFormation Templates in AWS: +1. Navigate to the [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). +2. Click **Create stack** and select **With new resources**. +3. Choose **Upload a template file**, then click **Choose file** to upload the appropriate CloudFormation template provided. +4. Follow the prompts and click **Next** to complete the stack creation. +5. After the stacks are created, note down the **Role ARN** and **SQS Queue URL**. + +- **Template 1: OpenID Connect authentication provider deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Template 2: AWS Server Access resources deployment**: `AWSS3ServerAccess` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +### 2. Connect new collectors + To enable AWS S3 Server Access Logs Connector for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. 
+ +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Role ARN** +- **Queue URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add new collector** + +*AWS Server Access Logs connector* + +When you click the "Add new collector" button in the portal, a configuration form will open. You'll need to provide: + +*Account details* + +- **Role ARN** (required) +- **Queue URL** (required) + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `AWSS3ServerAccess` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/aws-cloudfront.md b/Tools/Solutions Analyzer/connector-docs/solutions/aws-cloudfront.md index 819502bb6a9..c20a1fcddeb 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/aws-cloudfront.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/aws-cloudfront.md @@ -21,6 +21,63 @@ This solution provides **1 data connector(s)**. This data connector enables the integration of AWS CloudFront logs with Microsoft Sentinel to support advanced threat detection, investigation, and security monitoring. By utilizing Amazon S3 for log storage and Amazon SQS for message queuing, the connector reliably ingests CloudFront access logs into Microsoft Sentinel +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
 + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Ingesting AWS CloudFront logs in Microsoft Sentinel** + +### List of Resources Required: + +* Open ID Connect (OIDC) web identity provider +* IAM Role +* Amazon S3 Bucket +* Amazon SQS +* AWS CloudFront configuration +#### 1. AWS CloudFormation Deployment + To configure access on AWS, two templates have been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace. + #### For each template, create a Stack in AWS: + 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). + 2. Choose the ‘**Specify template**’ option, then ‘**Upload a template file**’ by clicking on ‘**Choose file**’ and selecting the appropriate CloudFormation template file provided below. Click ‘**Choose file**’ and select the downloaded template. + 3. Click '**Next**' and '**Create stack**'. +- **Template 1: OpenID connect authentication deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Template 2: AWSCloudFront resources deployment**: `AWSCloudFront` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Connect new collectors + To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. 
+ +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Role ARN** +- **Queue URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add new controller** + +*AWS S3 connector* + +When you click the "Add new collector" button in the portal, a configuration form will open. You'll need to provide: + +*Account details* + +- **Role ARN** (required) +- **Queue URL** (required) + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `AWSCloudFront_AccessLog_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/aws-security-hub.md b/Tools/Solutions Analyzer/connector-docs/solutions/aws-security-hub.md index aaaacde40a4..ed08e4296f3 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/aws-security-hub.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/aws-security-hub.md @@ -22,6 +22,62 @@ This solution provides **1 data connector(s)**. This connector enables the ingestion of AWS Security Hub Findings, which are collected in AWS S3 buckets, into Microsoft Sentinel. It helps streamline the process of monitoring and managing security alerts by integrating AWS Security Hub Findings with Microsoft Sentinel's advanced threat detection and response capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Environment**: You must have the following AWS resources defined and configured: AWS Security Hub, Amazon Data Firehose, Amazon EventBridge, S3 Bucket, Simple Queue Service (SQS), IAM roles and permissions policies. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +### 1. AWS CloudFormation Deployment + Use the provided CloudFormation templates to configure the AWS environment for sending logs from AWS Security Hub to your Log Analytics Workspace. + +#### Deploy CloudFormation Templates in AWS: +1. Navigate to the [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). +2. Click **Create stack** and select **With new resources**. +3. Choose **Upload a template file**, then click **Choose file** to upload the appropriate CloudFormation template provided. +4. Follow the prompts and click **Next** to complete the stack creation. +5. After the stacks are created, note down the **Role ARN** and **SQS Queue URL**. + +- **Template 1: OpenID Connect authentication provider deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Template 2: AWS Security Hub resources deployment**: `AwsSecurityHub` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +### 2. 
Connect new collectors + To enable AWS Security Hub Connector for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Role ARN** +- **Queue URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add new controller** + +*AWS Security Hub connector* + +When you click the "Add new collector" button in the portal, a configuration form will open. You'll need to provide: + +*Account details* + +- **Role ARN** (required) +- **Queue URL** (required) + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `AWSSecurityHubFindings` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/aws-vpc-flow-logs.md b/Tools/Solutions Analyzer/connector-docs/solutions/aws-vpc-flow-logs.md index eacd2b09e8d..da6d9c1dcf2 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/aws-vpc-flow-logs.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/aws-vpc-flow-logs.md @@ -21,6 +21,59 @@ This solution provides **1 data connector(s)**. This connector allows you to ingest AWS VPC Flow Logs, collected in AWS S3 buckets, to Microsoft Sentinel. 
AWS VPC Flow Logs provide visibility into network traffic within your AWS Virtual Private Cloud (VPC), enabling security analysis and network monitoring. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. AWS CloudFormation Deployment + To configure access on AWS, two templates have been generated to set up the AWS environment to send VPC Flow Logs from an S3 bucket to your Log Analytics Workspace. + #### For each template, create a Stack in AWS: + 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). + 2. Choose the ‘Specify template’ option, then ‘Upload a template file’ by clicking on ‘Choose file’ and selecting the appropriate CloudFormation template file provided below. Click ‘Choose file’ and select the downloaded template. + 3. Click 'Next' and 'Create stack'. +- **Template 1: OpenID connect authentication deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Template 2: AWS VPC Flow Logs resources deployment**: `AwsVPCFlow` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Connect new collectors + To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill in the required information and click on 'Connect' +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. 
+ +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Role ARN** +- **Queue URL** +- **File Format** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add new controller** + +*AWS VPC Flow Logs connector* + +When you click the "Add new collector" button in the portal, a configuration form will open. You'll need to provide: + +*Account details* + +- **Role ARN** (required) +- **Queue URL** (required) +- **Data type** (required): Select from available options + - JSON Format + - Parquet Format + - CSV Format + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `AWSVPCFlow` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-activity.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-activity.md index 4412e21e7c3..5fe858ece5a 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-activity.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-activity.md @@ -21,6 +21,40 @@ This solution provides **1 data connector(s)**. Azure Activity Log is a subscription log that provides insight into subscription-level events that occur in Azure, including events from Azure Resource Manager operational data, service health events, write operations taken on the resources in your subscription, and the status of activities performed in Azure. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219695&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **Policy**: owner role assigned for each policy assignment scope. +- **Subscription**: owner role permission on the relevant subscription. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +ℹ️ This connector has been updated to use the diagnostics settings back-end pipeline, which provides increased functionality and better consistency with resource logs. +Connectors using this pipeline can also be governed at scale by Azure Policy. Learn more about the new Azure Activity connector. +Follow the instructions below to upgrade your connector to the diagnostics settings pipeline. + +**1. Disconnect your subscriptions from the legacy method** + +The subscriptions listed below are still using the older, legacy method. You are strongly encouraged to upgrade to the new pipeline.
+To do this, click on the 'Disconnect All' button below, before proceeding to launch the Azure Policy Assignment wizard. +- Configure data source: AzureActivityLog + +**2. Connect your subscriptions through diagnostic settings new pipeline** + +This connector uses Azure Policy to apply a single Azure Subscription log-streaming configuration to a collection of subscriptions, defined as a scope. +Follow the instructions below to create and apply a policy to all current and future subscriptions. **Note**, you may already have an active policy for this resource type. +**Launch the Azure Policy Assignment wizard and follow the steps.​** + + >1. In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope. + >2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as "True" all the log and metric types you want to ingest. + >3. To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox. + - **Configure policy assignment** + | | | |--------------------------|---| | **Tables Ingested** | `AzureActivity` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-batch-account.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-batch-account.md index e573ac07be8..4abb4a27b6b 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-batch-account.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-batch-account.md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Azure Batch Account](../connectors/azurebatchaccount-ccp.md) - -**Publisher:** Microsoft - -Azure Batch Account is a uniquely identified entity within the Batch service. 
Most Batch solutions use Azure Storage for storing resource files and output files, so each Batch account is usually associated with a corresponding storage account. This connector lets you stream your Azure Batch account diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2224103&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -| | | -|--------------------------|---| -| **Tables Ingested** | `AzureDiagnostics` | -| **Connector Definition Files** | [AzureBatchAccount_CCP.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Batch%20Account/Data%20Connectors/AzureBatchAccount_CCP.JSON) | - -[→ View full connector details](../connectors/azurebatchaccount-ccp.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AzureDiagnostics` | [Azure Batch Account](../connectors/azurebatchaccount-ccp.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-cloud-ngfw-by-palo-alto-networks.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-cloud-ngfw-by-palo-alto-networks.md index ceac2dbfeac..82be672a8cb 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-cloud-ngfw-by-palo-alto-networks.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-cloud-ngfw-by-palo-alto-networks.md @@ -22,6 +22,29 @@ This solution provides **1 data connector(s)**. Cloud Next-Generation Firewall by Palo Alto Networks - an Azure Native ISV Service - is Palo Alto Networks Next-Generation Firewall (NGFW) delivered as a cloud-native service on Azure. 
You can discover Cloud NGFW in the Azure Marketplace and consume it in your Azure Virtual Networks (VNet). With Cloud NGFW, you can access the core NGFW capabilities such as App-ID, URL filtering based technologies. It provides threat prevention and detection through cloud-delivered security services and threat prevention signatures. The connector allows you to easily connect your Cloud NGFW logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. For more information, see the [Cloud NGFW for Azure documentation](https://docs.paloaltonetworks.com/cloud-ngfw/azure). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Cloud NGFW by Palo Alto Networks to Microsoft Sentinel** + +Enable Log Settings on All Cloud NGFWs by Palo Alto Networks. +- Configure log settings: OpenCloudNGFW + +Inside your Cloud NGFW resource: + +1. Navigate to the **Log Settings** from the homepage. +2. Ensure the **Enable Log Settings** checkbox is checked. +3. From the **Log Settings** drop-down, choose the desired Log Analytics Workspace. +4. Confirm your selections and configurations. +5. Click **Save** to apply the settings. 
+ | | | |--------------------------|---| | **Tables Ingested** | `fluentbit_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-cognitive-search.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-cognitive-search.md index 2c1bcdb6bfa..f509c2d73fd 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-cognitive-search.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-cognitive-search.md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Azure Cognitive Search](../connectors/azurecognitivesearch-ccp.md) - -**Publisher:** Microsoft - -Azure Cognitive Search is a cloud search service that gives developers infrastructure, APIs, and tools for building a rich search experience over private, heterogeneous content in web, mobile, and enterprise applications. This connector lets you stream your Azure Cognitive Search diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. - -| | | -|--------------------------|---| -| **Tables Ingested** | `AzureDiagnostics` | -| **Connector Definition Files** | [AzureCognitiveSearch_CCP.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cognitive%20Search/Data%20Connectors/AzureCognitiveSearch_CCP.JSON) | - -[→ View full connector details](../connectors/azurecognitivesearch-ccp.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AzureDiagnostics` | [Azure Cognitive Search](../connectors/azurecognitivesearch-ccp.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. 
[← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-data-lake-storage-gen1.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-data-lake-storage-gen1.md index 1e060372162..93b225d7cb2 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-data-lake-storage-gen1.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-data-lake-storage-gen1.md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Azure Data Lake Storage Gen1](../connectors/azuredatalakestoragegen1-ccp.md) - -**Publisher:** Microsoft - -Azure Data Lake Storage Gen1 is an enterprise-wide hyper-scale repository for big data analytic workloads. Azure Data Lake enables you to capture data of any size, type, and ingestion speed in one single place for operational and exploratory analytics. This connector lets you stream your Azure Data Lake Storage Gen1 diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223812&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
- -| | | -|--------------------------|---| -| **Tables Ingested** | `AzureDiagnostics` | -| **Connector Definition Files** | [AzureDataLakeStorageGen1_CCP.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Data%20Lake%20Storage%20Gen1/Data%20Connectors/AzureDataLakeStorageGen1_CCP.JSON) | - -[→ View full connector details](../connectors/azuredatalakestoragegen1-ccp.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AzureDiagnostics` | [Azure Data Lake Storage Gen1](../connectors/azuredatalakestoragegen1-ccp.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-ddos-protection.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-ddos-protection.md index 4af71ab89fb..156fcb89f39 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-ddos-protection.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-ddos-protection.md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Azure DDoS Protection](../connectors/ddos.md) - -**Publisher:** Microsoft - -Connect to Azure DDoS Protection Standard logs via Public IP Address Diagnostic Logs. In addition to the core DDoS protection in the platform, Azure DDoS Protection Standard provides advanced DDoS mitigation capabilities against network attacks. It's automatically tuned to protect your specific Azure resources. Protection is simple to enable during the creation of new virtual networks. It can also be done after creation and requires no application or resource changes. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219760&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -| | | -|--------------------------|---| -| **Tables Ingested** | `AzureDiagnostics` | -| **Connector Definition Files** | [DDOS.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20DDoS%20Protection/Data%20Connectors/DDOS.JSON) | - -[→ View full connector details](../connectors/ddos.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AzureDiagnostics` | [Azure DDoS Protection](../connectors/ddos.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-event-hubs.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-event-hubs.md index d24d7c00cd4..27fa4725e8e 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-event-hubs.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-event-hubs.md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Azure Event Hub](../connectors/azureeventhub-ccp.md) - -**Publisher:** Microsoft - -Azure Event Hubs is a big data streaming platform and event ingestion service. It can receive and process millions of events per second. This connector lets you stream your Azure Event Hub diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. 
- -| | | -|--------------------------|---| -| **Tables Ingested** | `AzureDiagnostics` | -| **Connector Definition Files** | [AzureEventHub_CCP.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Event%20Hubs/Data%20Connectors/AzureEventHub_CCP.JSON) | - -[→ View full connector details](../connectors/azureeventhub-ccp.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AzureDiagnostics` | [Azure Event Hub](../connectors/azureeventhub-ccp.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-firewall.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-firewall.md index 6f54b8a577d..95e84ee64e5 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-firewall.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-firewall.md @@ -13,45 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Azure Firewall](../connectors/azurefirewall.md) - -**Publisher:** Microsoft - -Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
- -| | | -|--------------------------|---| -| **Tables Ingested** | `AZFWApplicationRule` | -| | `AZFWDnsQuery` | -| | `AZFWFatFlow` | -| | `AZFWFlowTrace` | -| | `AZFWIdpsSignature` | -| | `AZFWInternalFqdnResolutionFailure` | -| | `AZFWNatRule` | -| | `AZFWNetworkRule` | -| | `AZFWThreatIntel` | -| | `AzureDiagnostics` | -| **Connector Definition Files** | [AzureFirewall.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON) | - -[→ View full connector details](../connectors/azurefirewall.md) - -## Tables Reference - -This solution ingests data into **10 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AZFWApplicationRule` | [Azure Firewall](../connectors/azurefirewall.md) | -| `AZFWDnsQuery` | [Azure Firewall](../connectors/azurefirewall.md) | -| `AZFWFatFlow` | [Azure Firewall](../connectors/azurefirewall.md) | -| `AZFWFlowTrace` | [Azure Firewall](../connectors/azurefirewall.md) | -| `AZFWIdpsSignature` | [Azure Firewall](../connectors/azurefirewall.md) | -| `AZFWInternalFqdnResolutionFailure` | [Azure Firewall](../connectors/azurefirewall.md) | -| `AZFWNatRule` | [Azure Firewall](../connectors/azurefirewall.md) | -| `AZFWNetworkRule` | [Azure Firewall](../connectors/azurefirewall.md) | -| `AZFWThreatIntel` | [Azure Firewall](../connectors/azurefirewall.md) | -| `AzureDiagnostics` | [Azure Firewall](../connectors/azurefirewall.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. 
[← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-key-vault.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-key-vault.md index fa9908f3d4e..1d9dab9a440 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-key-vault.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-key-vault.md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Azure Key Vault](../connectors/azurekeyvault.md) - -**Publisher:** Microsoft - -Azure Key Vault is a cloud service for securely storing and accessing secrets. A secret is anything that you want to tightly control access to, such as API keys, passwords, certificates, or cryptographic keys. This connector lets you stream your Azure Key Vault diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220125&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -| | | -|--------------------------|---| -| **Tables Ingested** | `AzureDiagnostics` | -| **Connector Definition Files** | [AzureKeyVault.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Key%20Vault/Data%20Connectors/AzureKeyVault.JSON) | - -[→ View full connector details](../connectors/azurekeyvault.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AzureDiagnostics` | [Azure Key Vault](../connectors/azurekeyvault.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. 
[← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-kubernetes-service.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-kubernetes-service.md index 0cc160baf61..5828556f6db 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-kubernetes-service.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-kubernetes-service.md @@ -13,31 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Azure Kubernetes Service (AKS)](../connectors/azurekubernetes.md) - -**Publisher:** Microsoft - -Azure Kubernetes Service (AKS) is an open-source, fully-managed container orchestration service that allows you to deploy, scale, and manage Docker containers and container-based applications in a cluster environment. This connector lets you stream your Azure Kubernetes Service (AKS) diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219762&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
- -| | | -|--------------------------|---| -| **Tables Ingested** | `AzureDiagnostics` | -| | `ContainerInventory` | -| | `KubeEvents` | -| **Connector Definition Files** | [AzureKubernetes.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20kubernetes%20Service/Data%20Connectors/AzureKubernetes.JSON) | - -[→ View full connector details](../connectors/azurekubernetes.md) - -## Tables Reference - -This solution ingests data into **3 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AzureDiagnostics` | [Azure Kubernetes Service (AKS)](../connectors/azurekubernetes.md) | -| `ContainerInventory` | [Azure Kubernetes Service (AKS)](../connectors/azurekubernetes.md) | -| `KubeEvents` | [Azure Kubernetes Service (AKS)](../connectors/azurekubernetes.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-logic-apps.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-logic-apps.md index dbe71040674..cc361fddade 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-logic-apps.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-logic-apps.md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Azure Logic Apps](../connectors/azurelogicapps-ccp.md) - -**Publisher:** Microsoft - -Azure Logic Apps is a cloud-based platform for creating and running automated workflows that integrate your apps, data, services, and systems. This connector lets you stream your Azure Logic Apps diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. 
- -| | | -|--------------------------|---| -| **Tables Ingested** | `AzureDiagnostics` | -| **Connector Definition Files** | [AzureLogicApps_CCP.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Logic%20Apps/Data%20Connectors/AzureLogicApps_CCP.JSON) | - -[→ View full connector details](../connectors/azurelogicapps-ccp.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AzureDiagnostics` | [Azure Logic Apps](../connectors/azurelogicapps-ccp.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-network-security-groups.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-network-security-groups.md index 54ea09009dc..bec77a3c1ee 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-network-security-groups.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-network-security-groups.md @@ -13,43 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Network Security Groups](../connectors/azurensg.md) - -**Publisher:** Microsoft - -Azure network security groups (NSG) allow you to filter network traffic to and from Azure resources in an Azure virtual network. A network security group includes rules that allow or deny traffic to a virtual network subnet, network interface, or both. - - - -When you enable logging for an NSG, you can gather the following types of resource log information: - - - -- **Event:** Entries are logged for which NSG rules are applied to VMs, based on MAC address. - -- **Rule counter:** Contains entries for how many times each NSG rule is applied to deny or allow traffic. The status for these rules is collected every 300 seconds. 
- - - - - -This connector lets you stream your NSG diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223718&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -| | | -|--------------------------|---| -| **Tables Ingested** | `AzureDiagnostics` | -| **Connector Definition Files** | [AzureNSG.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Network%20Security%20Groups/Data%20Connectors/AzureNSG.JSON) | - -[→ View full connector details](../connectors/azurensg.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AzureDiagnostics` | [Network Security Groups](../connectors/azurensg.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-service-bus.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-service-bus.md index 4ac7e080810..efb53f8d54f 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-service-bus.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-service-bus.md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Azure Service Bus](../connectors/azureservicebus-ccp.md) - -**Publisher:** Microsoft - -Azure Service Bus is a fully managed enterprise message broker with message queues and publish-subscribe topics (in a namespace). This connector lets you stream your Azure Service Bus diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. 
- -| | | -|--------------------------|---| -| **Tables Ingested** | `AzureDiagnostics` | -| **Connector Definition Files** | [AzureServiceBus_CCP.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Service%20Bus/Data%20Connectors/AzureServiceBus_CCP.JSON) | - -[→ View full connector details](../connectors/azureservicebus-ccp.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AzureDiagnostics` | [Azure Service Bus](../connectors/azureservicebus-ccp.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-sql-database-solution-for-sentinel.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-sql-database-solution-for-sentinel.md index 3c9e6892b87..ff4e4dfcd59 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-sql-database-solution-for-sentinel.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-sql-database-solution-for-sentinel.md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Azure SQL Databases](../connectors/azuresql.md) - -**Publisher:** Microsoft - -Azure SQL is a fully managed, Platform-as-a-Service (PaaS) database engine that handles most database management functions, such as upgrading, patching, backups, and monitoring, without necessitating user involvement. This connector lets you stream your Azure SQL databases audit and diagnostic logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. 
- -| | | -|--------------------------|---| -| **Tables Ingested** | `AzureDiagnostics` | -| **Connector Definition Files** | [template_AzureSql.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20SQL%20Database%20solution%20for%20sentinel/Data%20Connectors/template_AzureSql.JSON) | - -[→ View full connector details](../connectors/azuresql.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AzureDiagnostics` | [Azure SQL Databases](../connectors/azuresql.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-storage.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-storage.md index 7c106a20732..f0cab3145e2 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-storage.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-storage.md @@ -13,35 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Azure Storage Account](../connectors/azurestorageaccount.md) - -**Publisher:** Microsoft - -Azure Storage account is a cloud solution for modern data storage scenarios. It contains all your data objects: blobs, files, queues, tables, and disks. This connector lets you stream Azure Storage accounts diagnostics logs into your Microsoft Sentinel workspace, allowing you to continuously monitor activity in all your instances, and detect malicious activity in your organization. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220068&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
- -| | | -|--------------------------|---| -| **Tables Ingested** | `AzureMetrics` | -| | `StorageBlobLogs` | -| | `StorageFileLogs` | -| | `StorageQueueLogs` | -| | `StorageTableLogs` | -| **Connector Definition Files** | [AzureStorageAccount_CCP.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Storage/Data%20Connectors/AzureStorageAccount_CCP.JSON) | - -[→ View full connector details](../connectors/azurestorageaccount.md) - -## Tables Reference - -This solution ingests data into **5 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AzureMetrics` | [Azure Storage Account](../connectors/azurestorageaccount.md) | -| `StorageBlobLogs` | [Azure Storage Account](../connectors/azurestorageaccount.md) | -| `StorageFileLogs` | [Azure Storage Account](../connectors/azurestorageaccount.md) | -| `StorageQueueLogs` | [Azure Storage Account](../connectors/azurestorageaccount.md) | -| `StorageTableLogs` | [Azure Storage Account](../connectors/azurestorageaccount.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-stream-analytics.md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-stream-analytics.md index 7c8f4dd9069..8cad7346109 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-stream-analytics.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-stream-analytics.md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. 
+**This solution does not include data connectors.** -### [Azure Stream Analytics](../connectors/azurestreamanalytics-ccp.md) - -**Publisher:** Microsoft - -Azure Stream Analytics is a real-time analytics and complex event-processing engine that is designed to analyze and process high volumes of fast streaming data from multiple sources simultaneously. This connector lets you stream your Azure Stream Analytics hub diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. - -| | | -|--------------------------|---| -| **Tables Ingested** | `AzureDiagnostics` | -| **Connector Definition Files** | [AzureStreamAnalytics_CCP.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Stream%20Analytics/Data%20Connectors/AzureStreamAnalytics_CCP.JSON) | - -[→ View full connector details](../connectors/azurestreamanalytics-ccp.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AzureDiagnostics` | [Azure Stream Analytics](../connectors/azurestreamanalytics-ccp.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azure-web-application-firewall-(waf).md b/Tools/Solutions Analyzer/connector-docs/solutions/azure-web-application-firewall-(waf).md index 0ce9e8713b5..425f27d2220 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azure-web-application-firewall-(waf).md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azure-web-application-firewall-(waf).md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. 
+**This solution does not include data connectors.** -### [Azure Web Application Firewall (WAF)](../connectors/waf.md) - -**Publisher:** Microsoft - -Connect to the Azure Web Application Firewall (WAF) for Application Gateway, Front Door, or CDN. This WAF protects your applications from common web vulnerabilities such as SQL injection and cross-site scripting, and lets you customize rules to reduce false positives. Follow these instructions to stream your Microsoft Web application firewall logs into Microsoft Sentinel. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223546&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -| | | -|--------------------------|---| -| **Tables Ingested** | `AzureDiagnostics` | -| **Connector Definition Files** | [template_WAF.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Web%20Application%20Firewall%20%28WAF%29/Data%20Connectors/template_WAF.JSON) | - -[→ View full connector details](../connectors/waf.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AzureDiagnostics` | [Azure Web Application Firewall (WAF)](../connectors/waf.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/azuredevopsauditing.md b/Tools/Solutions Analyzer/connector-docs/solutions/azuredevopsauditing.md index 0b994cb19cf..cbb76d2f852 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/azuredevopsauditing.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/azuredevopsauditing.md @@ -21,6 +21,34 @@ This solution provides **1 data connector(s)**. The Azure DevOps Audit Logs data connector allows you to ingest audit events from Azure DevOps into Microsoft Sentinel. 
This data connector is built using the Microsoft Sentinel Codeless Connector Platform, ensuring seamless integration. It leverages the Azure DevOps Audit Logs API to fetch detailed audit events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview). These transformations enable parsing of the received audit data into a custom table during ingestion, improving query performance by eliminating the need for additional parsing. By using this connector, you can gain enhanced visibility into your Azure DevOps environment and streamline your security operations. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. + +**Custom Permissions:** +- **Azure DevOps Prerequisite**: Please ensure the following:
1. Register an Entra App in Microsoft Entra Admin Center under App Registrations.
2. In 'API permissions' - add Permissions to 'Azure DevOps - vso.auditlog'.
3. In 'Certificates & secrets' - generate 'Client secret'.
4. In 'Authentication' - add Redirect URI: 'https://portal.azure.com/TokenAuthorize/ExtensionName/Microsoft_Azure_Security_Insights'.
5. In the Azure DevOps settings - enable audit log and set **View audit log** for the user. [Azure DevOps Auditing](https://learn.microsoft.com/en-us/azure/devops/organizations/audit/azure-devops-auditing?view=azure-devops&tabs=preview-page).
6. Ensure the user assigned to connect the data connector has the View audit logs permission explicitly set to Allow at all times. This permission is essential for successful log ingestion. If the permission is revoked or not granted, data ingestion will fail or be interrupted. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect to Azure DevOps to start collecting Audit logs in Microsoft Sentinel.** + +1. Enter the App you have registered. + 2. In the 'Overview' section, copy the Application (client) ID. + 3. Select the 'Endpoints' button, and copy the 'OAuth 2.0 authorization endpoint (v2)' value and the 'OAuth 2.0 token endpoint (v2)' value. + 4. In the 'Certificates & secrets' section, copy the 'Client Secret value', and store it securely. +5. Provide the required information below and click 'Connect'. +- **Token Endpoint**: https://login.microsoftonline.com/{TenantId}/oauth2/v2.0/token +- **Authorization Endpoint**: https://login.microsoftonline.com/{TenantId}/oauth2/v2.0/authorize +- **API Endpoint**: https://auditservice.dev.azure.com/{organizationName}/_apis/audit/auditlog?api-version=7.2-preview +- **OAuth Configuration**: + - App Client ID + - App Client Secret + - Click 'Connect' to authenticate + | | | |--------------------------|---| | **Tables Ingested** | `ADOAuditLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/barracuda-cloudgen-firewall.md b/Tools/Solutions Analyzer/connector-docs/solutions/barracuda-cloudgen-firewall.md index 2a8ebd16fa3..c7a500f118d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/barracuda-cloudgen-firewall.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/barracuda-cloudgen-firewall.md @@ -21,6 +21,50 @@ This solution provides **1 data connector(s)**. 
The Barracuda CloudGen Firewall (CGFW) connector allows you to easily connect your Barracuda CGFW logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **Barracuda CloudGen Firewall**: must be configured to export logs via Syslog + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CGFWFirewallActivity and load the function code or click [here](https://aka.ms/sentinel-barracudacloudfirewall-parser). The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. 
+ +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. + +**2. Configure and connect the Barracuda CloudGen Firewall** + +[Follow instructions](https://aka.ms/sentinel-barracudacloudfirewall-connector) to configure syslog streaming. Use the IP address or hostname for the Linux machine with the Microsoft Sentinel agent installed for the Destination IP address. +- **Open Syslog settings** + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/barracuda-waf.md b/Tools/Solutions Analyzer/connector-docs/solutions/barracuda-waf.md index 9297e563158..77c55c40d8b 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/barracuda-waf.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/barracuda-waf.md @@ -25,6 +25,31 @@ The Barracuda Web Application Firewall (WAF) connector allows you to easily conn [For more information >​](https://aka.ms/CEF-Barracuda) +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure and connect Barracuda WAF** + +The Barracuda Web Application Firewall can integrate with and export logs directly to Microsoft Sentinel via Azure OMS Server.​ + +1. 
Go to [Barracuda WAF configuration](https://aka.ms/asi-barracuda-connector), and follow the instructions, using the parameters below to set up the connection:. + +2. Web Firewall logs facility: Go to the advanced settings (link below) for your workspace and on the **Data > Syslog** tabs, make sure that the facility exists.​ + +> Notice that the data from all regions will be stored in the selected workspace +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Open Syslog settings** + | | | |--------------------------|---| | **Tables Ingested** | `Barracuda_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/better-mobile-threat-defense-(mtd).md b/Tools/Solutions Analyzer/connector-docs/solutions/better-mobile-threat-defense-(mtd).md index 27f8329979d..62338b2fcf3 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/better-mobile-threat-defense-(mtd).md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/better-mobile-threat-defense-(mtd).md @@ -21,6 +21,33 @@ This solution provides **1 data connector(s)**. The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +1. In **Better MTD Console**, click on **Integration** on the side bar. +2. Select **Others** tab. +3. Click the **ADD ACCOUNT** button and Select **Microsoft Sentinel** from the available integrations. +4. Create the Integration: + - set `ACCOUNT NAME` to a descriptive name that identifies the integration then click **Next** + - Enter your `WORKSPACE ID` and `PRIMARY KEY` from the fields below, click **Save** + - Click **Done** +5. Threat Policy setup (Which Incidents should be reported to `Microsoft Sentinel`): + - In **Better MTD Console**, click on **Policies** on the side bar + - Click on the **Edit** button of the Policy that you are using. + - For each Incident types that you want to be logged go to **Send to Integrations** field and select **Sentinel** +6. For additional information, please refer to our [Documentation](https://mtd-docs.bmobi.net/integrations/how-to-setup-azure-sentinel-integration#mtd-integration-configuration). 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `BetterMTDAppLog_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/beyond-security-besecure.md b/Tools/Solutions Analyzer/connector-docs/solutions/beyond-security-besecure.md index 7d35f2cc5db..02dcc119ca6 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/beyond-security-besecure.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/beyond-security-besecure.md @@ -21,6 +21,37 @@ This solution provides **1 data connector(s)**. The [Beyond Security beSECURE](https://beyondsecurity.com/) connector allows you to easily connect your Beyond Security beSECURE scan events, scan results and audit trail with Azure Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Configure beSECURE** + +Follow the steps below to configure your beSECURE solution to send out scan results, scan status and audit trail to Azure Sentinel. +**1. Access the Integration menu** + + 1.1 Click on the 'More' menu option + +1.2 Select Server + +1.3 Select Integration + +1.4 Enable Azure Sentinel + + **2. Provide Azure Sentinel settings** + + Fill in the Workspace ID and Primary Key values, click 'Modify' + - **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + - **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `beSECURE_Audit_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/bigid.md b/Tools/Solutions Analyzer/connector-docs/solutions/bigid.md index 1dbb107c516..691cbab0222 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/bigid.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/bigid.md @@ -21,6 +21,25 @@ This solution provides **1 data connector(s)**. The [BigID DSPM](https://bigid.com/data-security-posture-management/) data connector provides the capability to ingest BigID DSPM cases with affected objects and datasource information into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **BigID DSPM API access**: Access to the BigID DSPM API through a BigID Token is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Connect to BigID DSPM API to start collecting BigID DSPM cases and affected Objects in Microsoft Sentinel** + +Provide your BigID domain name like 'customer.bigid.cloud' and your BigID token. Generate a token in the BigID console via Settings -> Access Management -> Users -> Select User and generate a token. +- **BigID FQDN**: BigID FQDN +- **BigID Token**: (password field) +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `BigIDDSPMCatalog_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/bitglass.md b/Tools/Solutions Analyzer/connector-docs/solutions/bitglass.md index 15ad243945c..ab3ccdcd7d5 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/bitglass.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/bitglass.md @@ -21,6 +21,98 @@ This solution provides **1 data connector(s)**. The [Bitglass](https://www.bitglass.com/) data connector provides the capability to retrieve security event logs of the Bitglass services and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **REST API Credentials/permissions**: **BitglassToken** and **BitglassServiceURL** are required for making API calls.
+
+**Setup Instructions:**
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+>**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.
+
+>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.
+
+>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Bitglass**](https://aka.ms/sentinel-bitglass-parser) which is deployed with the Microsoft Sentinel Solution.
+
+**STEP 1 - Configuration steps for the Bitglass Log Retrieval API**
+
+ Follow the instructions to obtain the credentials.
+
+1. Please contact Bitglass [support](https://pages.bitglass.com/Contact.html) and obtain the **BitglassToken** and **BitglassServiceURL**.
+2. Save credentials for using in the data connector.
+
+**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**
+
+>**IMPORTANT:** Before deploying the Bitglass data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Bitglass data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-bitglass-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **BitglassToken**, **BitglassServiceURL** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Bitglass data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-bitglass-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. 
+If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitglassXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select ** New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + BitglassToken + BitglassServiceURL + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. 
+ | | | |--------------------------|---| | **Tables Ingested** | `BitglassLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/bitsight.md b/Tools/Solutions Analyzer/connector-docs/solutions/bitsight.md index 04e2cc23a85..d061e124a99 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/bitsight.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/bitsight.md @@ -22,6 +22,237 @@ This solution provides **1 data connector(s)**. The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: BitSight API Token is required. See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Steps to Create/Get Bitsight API Token** + + Follow these instructions to get a BitSight API Token. + 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, + Go to Settings > Account > User Preferences > API Token. + 2. For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, + Go to Settings > Account > User Preferences > API Token. + 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, + Go to Settings > Account > API Token. + +**STEP 2 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. 
+ +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 3 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 4 - Get Object ID of your application in Microsoft Entra ID** + + After creating your app registration, follow the steps in this section to get Object ID: + 1. Go to **Microsoft Entra ID**. + 2. Select **Enterprise applications** from the left menu. + 3. Find your newly created application in the list (you can search by the name you provided). + 4. Click on the application. + 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment. + +**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID** + + Follow the steps in this section to assign the role: + 1. In the Azure portal, Go to **Resource Group** and select your resource group. 
+ 2. Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. Select **Contributor** as role and click on next. + 5. In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**7. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the BitSight connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + + a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. + + b. **API_token** - Enter API Token of your BitSight account. + + c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. + + d. 
**Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. + + e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. + + f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. + + g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. + + h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. + + i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics "Settings". + + j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. + + k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. + + l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. + + m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. + + n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. + + o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. + + p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. 
Please do not keep this field as empty else you will get validation error. + + q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. + + r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. + + s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. + + t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. + + u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. + + v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. + + w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). + + x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). + + y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. +4. Once all application settings have been entered, click **Review + create** to deploy.. + +**8. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code). 
+ +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX). + + e. **Select a runtime:** Choose Python 3.8 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. 
Add each of the following application settings individually, with their respective values (case-sensitive): + + a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. + + b. **API_token** - Enter API Token of your BitSight account. + + c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. + + d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. + + e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. + + f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. + + g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. + + h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. + + i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics "Settings". + + j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. + + k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. + + l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. + + m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. + + n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. + + o. 
**Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. + + p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. + + q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. + + r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. + + s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. + + t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. + + u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. + + v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. + + w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). + + x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). + + y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. 
This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `BitsightAlerts_data_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/bitwarden.md b/Tools/Solutions Analyzer/connector-docs/solutions/bitwarden.md index c248a85ad0e..e0cc1a7ba9b 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/bitwarden.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/bitwarden.md @@ -22,6 +22,31 @@ This solution provides **1 data connector(s)**. This connector provides insight into activity of your Bitwarden organization such as user's activity (logged in, changed password, 2fa, etc.), cipher activity (created, updated, deleted, shared, etc.), collection activity, organization activity, and more. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Bitwarden Client Id and Client Secret**: Your API key can be found in the Bitwarden organization admin console. Please see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Connect Bitwarden Event Logs to Microsoft Sentinel** + +Your API key can be found in the Bitwarden organization admin console. +Please see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information. +Self-hosted Bitwarden servers may need to reconfigure their installation's URL. +- **Bitwarden Identity Url**: https://identity.bitwarden.com +- **Bitwarden Api Url**: https://api.bitwarden.com +- **OAuth Configuration**: + - Client ID + - Client Secret + - Click 'Connect' to authenticate + | | | |--------------------------|---| | **Tables Ingested** | `BitwardenEventLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/blackberry-cylanceprotect.md b/Tools/Solutions Analyzer/connector-docs/solutions/blackberry-cylanceprotect.md index 1eebec4db9f..e96fe01fdf8 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/blackberry-cylanceprotect.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/blackberry-cylanceprotect.md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [[Deprecated] Blackberry CylancePROTECT](../connectors/blackberrycylanceprotect.md) - -**Publisher:** Blackberry - -The [Blackberry CylancePROTECT](https://www.blackberry.com/us/en/products/blackberry-protect) connector allows you to easily connect your CylancePROTECT logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities. 
- -| | | -|--------------------------|---| -| **Tables Ingested** | `Syslog` | -| **Connector Definition Files** | [template_BlackberryCylancePROTECT.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Blackberry%20CylancePROTECT/Data%20Connectors/template_BlackberryCylancePROTECT.JSON) | - -[→ View full connector details](../connectors/blackberrycylanceprotect.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `Syslog` | [[Deprecated] Blackberry CylancePROTECT](../connectors/blackberrycylanceprotect.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/bloodhound-enterprise.md b/Tools/Solutions Analyzer/connector-docs/solutions/bloodhound-enterprise.md index 936ba3cada4..c5f3bf219b1 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/bloodhound-enterprise.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/bloodhound-enterprise.md @@ -22,6 +22,161 @@ This solution provides **1 data connector(s)**. The solution is designed to test Bloodhound Enterprise package creation process. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **REST API Credentials/permissions**: **BloodHound Enterprise API key & Id** is required. See the documentation to learn more about API on the `https://bloodhound.specterops.io/integrations/bloodhound-api/working-with-api`. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a 'BloodHound Enterprise' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +**STEP 1 - Retrieve BloodHound Enterprise API Key and ID** + +To enable the Azure Function to authenticate successfully and pull logs into Microsoft Sentinel, you must first obtain the API Key and ID from your BloodHound Enterprise instance. See the documentation to learn more about API on the `https://bloodhound.specterops.io/integrations/bloodhound-api/working-with-api`. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the 'BloodHound Enterprise' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'BloodHound Enterprise' API authorization key(s) or Token, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the 'BloodHound Enterprise' connector. + +1. 
Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)]() +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Tenant URL**, **API Key**, **API ID** 'and/or Other required fields'. +>Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the 'BloodHound Enterprise' connector manually with Azure Functions. + +**1. Create a Function App** + +1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp). +2. Click **+ Create** at the top. +3. In the **Basics** tab, ensure Runtime stack is set to **python 3.11**. +4. In the **Hosting** tab, ensure **Plan type** is set to **'Consumption (Serverless)'**. +5. Select a Storage account. +6. 'Add other required configurations'. +7. 'Make other preferable configuration changes', if needed, then click **Create**. + +**2. Import Function App Code (Zip deployment)** + +1. Install Azure CLI +2. From terminal type **az functionapp deployment source config-zip -g <ResourceGroup> -n <FunctionApp> --src <ZipFilePath>** and hit enter. Set the `ResourceGroup` value to: your resource group name. Set the `FunctionApp` value to: your newly created function app name. Set the `Zip File` value to: `digitalshadowsConnector.zip` (path to your zip file).
Note:- Download the zip file from the link - [Function App Code](https://github.com/metron-labs/Azure-Sentinel/blob/bloodhound/Solutions/BloodHound/Data%20Connectors/BloodHoundAzureFunction.zip) + +**3. Configure the Function App** + +1. In the Function App screen, click the Function App name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following 'x (number of)' application settings individually, under Name, with their respective string values (case-sensitive) under Value: + DigitalShadowsAccountID + WorkspaceID + WorkspaceKey + DigitalShadowsKey + DigitalShadowsSecret + HistoricalDays + DigitalShadowsURL + ClassificationFilterOperation + HighVariabilityClassifications + FUNCTION_NAME + logAnalyticsUri (optional) +(add any other settings required by the Function App) +Set the `DigitalShadowsURL` value to: `https://api.searchlight.app/v1` +Set the `HighVariabilityClassifications` value to: `exposed-credential,marked-document` +Set the `ClassificationFilterOperation` value to: `exclude` for exclude function app or `include` for include function app +>Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://<CustomerId>.ods.opinsights.azure.us. +4. Once all application settings have been entered, click **Save**. + +**STEP 3 - Register the Application in Microsoft Entra ID** + + 1.
**Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**: + - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab. + - Ensure you are logged in with an account that has **Admin level** permissions. + +2. **Create a New Application**: + - In the **Microsoft Entra ID portal**, select **App registrations** mentioned on the left-hand side tab. + - Click on **+ New registration**. + - Fill out the following fields: + - **Name**: Enter a name for the app (e.g., “BloodHound App”). + - **Supported account types**: Choose **Accounts in this organizational directory only** (Default Directory only - Single tenant). + - **Redirect URI**: Leave this blank unless required otherwise. + - Click **Register** to create the application. + +3. **Copy Application and Tenant IDs**: + - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You’ll need these for the integration. + +4. **Create a Client Secret**: + - In the **Certificates & secrets** section, click **+ New client secret**. + - Add a description (e.g., 'BloodHound Secret') and set an expiration (e.g., 1 year). + - Click **Add**. + - **Copy the client secret value immediately**, as it will not be shown again. + + **STEP 4 - Assign the "Monitoring Metrics Publisher" Role to the App** + + 1. **Open the Resource Group in Azure Portal**: + - Navigate to the **Resource Group** that contains the **Log Analytics Workspace** and **Data Collection Rules (DCRs)** where you want the app to push data. + +2. **Assign the Role**: + - In the **Resource Group** menu, click on **Access control (IAM)** mentioned on the left-hand side tab. + - Click on **+ Add** and select **Add role assignment**. + - In the **Role** dropdown, search for and select the **Monitoring Metrics Publisher** role. + - Under **Assign access to**, choose **Azure AD user, group, or service principal**.
+ - In the **Select** field, search for your registered app by **name** or **client ID**. + - Click **Save** to assign the role to the application. + + **STEP 5 - Deploy the ARM Template** + + 1. **Retrieve the Workspace ID**: + - After assigning the role, you will need the **Workspace ID**. + - Navigate to the **Log Analytics Workspace** within the **Resource Group**. + - In the **Overview** section, locate the **Workspace ID** field under **Workspace details**. + - **Copy the Workspace ID** and keep it handy for the next steps. + +2. **Click the Deploy to Azure Button**: + - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fmetron-labs%2FAzure-Sentinel%2Fbloodhound%2FSolutions%2FBloodHound%2FData%2520Connectors%2FDeployToAzure.json). + - This will take you directly to the Azure portal to start the deployment. + +3. **Review and Customize Parameters**: + - On the custom deployment page, ensure you’re deploying to the correct **subscription** and **resource group**. + - Fill in the parameters like **workspace name**, **workspace ID**, and **workspace location**. + +4. **Click Review + Create** and then **Create** to deploy the resources. + + **STEP 6 - Verify DCE, DCR, and Log Analytics Table Setup** + + 1. **Check the Data Collection Endpoint (DCE)**: + - After deploying, go to **Azure Portal > Data Collection Endpoints**. + - Verify that the **BloodHoundDCE** endpoint has been created successfully. + - **Copy the DCE Logs Ingestion URI**, as you’ll need this for generating the webhook URL. + +2. **Confirm Data Collection Rule (DCR) Setup**: + - Go to **Azure Portal > Data Collection Rules**. + - Ensure the **BloodHoundDCR** rule is present. + - **Copy the Immutable ID** of the DCR from the Overview page, as you’ll need it for the webhook URL. + +3.
**Validate Log Analytics Table**: + - Navigate to your **Log Analytics Workspace** (linked to Microsoft Sentinel). + - Under the **Tables** section, verify that the **BloodHoundTable_CL** table has been created successfully and is ready to receive data. + | | | |--------------------------|---| | **Tables Ingested** | `BHEAttackPathsData_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/box.md b/Tools/Solutions Analyzer/connector-docs/solutions/box.md index dd9babbf516..b320aecb143 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/box.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/box.md @@ -25,6 +25,44 @@ This solution provides **2 data connector(s)**. The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Microsoft Sentinel using the Box REST API. Refer to [Box documentation](https://developer.box.com/guides/events/enterprise-events/for-enterprise/) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Box API credentials**: Box API requires a Box App client ID and client secret to authenticate. [See the documentation to learn more about Client Credentials grant](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/) +- **Box Enterprise ID**: Box Enterprise ID is required to make the connection. See documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This connector uses Codeless Connector Platform (CCP) to connect to the Box REST API to pull logs into Microsoft Sentinel. + +>**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**BoxEvents**](https://aka.ms/sentinel-BoxDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Create Box Custom Application** + +See documentation to [setup client credentials authentication](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/) + +**STEP 2 - Grab Client ID and Client Secret values** + +You might need to setup 2FA to fetch the secret. + +**STEP 3 - Grab Box Enterprise ID from Box Admin Console** + +See documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/) + +**STEP 4 - Connect to Box to start collecting event logs to Microsoft Sentinel** + +Provide the required values below: +- **Box Enterprise ID**: 123456 +- **OAuth Configuration**: + - Client ID + - Client Secret + - Click 'Connect' to authenticate + | | | |--------------------------|---| | **Tables Ingested** | `BoxEventsV2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/broadcom-symantecdlp.md b/Tools/Solutions Analyzer/connector-docs/solutions/broadcom-symantecdlp.md index 26e5d363bb9..5bd1d0263da 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/broadcom-symantecdlp.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/broadcom-symantecdlp.md @@ -25,6 +25,63 @@ This solution provides **2 data connector(s)**. The [Broadcom Symantec Data Loss Prevention (DLP)](https://www.broadcom.com/products/cyber-security/information-protection/data-loss-prevention) connector allows you to easily connect your Symantec DLP with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation.
This gives you more insight into your organization’s information, where it travels, and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SymantecDLP and load the function code or click [here](https://aka.ms/sentinel-symantecdlp-parser). The function usually takes 10-15 minutes to activate after solution installation/update. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. 
Check if there is no existing DCR configured to collect required facility of logs, create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Symantec DLP logs to a Syslog agent** + + Configure Symantec DLP to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. +1. [Follow these instructions](https://knowledge.broadcom.com/external/article/159509/generating-syslog-messages-from-data-los.html) to configure the Symantec DLP to forward syslog +2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:** `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2.
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/check-point-cloudguard-cnapp.md b/Tools/Solutions Analyzer/connector-docs/solutions/check-point-cloudguard-cnapp.md index ba23c697efb..ce7087be301 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/check-point-cloudguard-cnapp.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/check-point-cloudguard-cnapp.md @@ -21,6 +21,28 @@ This solution provides **1 data connector(s)**. The [CloudGuard](https://sc1.checkpoint.com/documents/CloudGuard_Dome9/Documentation/Overview/CloudGuard-CSPM-Introduction.htm?cshid=help_center_documentation) data connector enables the ingestion of security events from the CloudGuard API into Microsoft Sentinel™, using Microsoft Sentinel’s Codeless Connector Platform. The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) which parses incoming security event data into custom columns. This pre-parsing process eliminates the need for query-time parsing, resulting in improved performance for data queries. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **CloudGuard API Key**: Refer to the instructions provided [here](https://sc1.checkpoint.com/documents/CloudGuard_Dome9/Documentation/Settings/Users-Roles.htm#add_service) to generate an API key. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Connect CloudGuard Security Events to Microsoft Sentinel** + +To enable the CloudGuard connector for Microsoft Sentinel, enter the required information below and select Connect. +> +- **API Key ID**: api_key +- **API Key Secret**: (password field) +- **CloudGuard Endpoint URL**: e.g. https://api.dome9.com +- **Filter**: Paste filter from CloudGuard +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `CloudGuard_SecurityEvents_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/check-point-cyberint-alerts.md b/Tools/Solutions Analyzer/connector-docs/solutions/check-point-cyberint-alerts.md index d82b0aff2c4..e89ca0e85d2 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/check-point-cyberint-alerts.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/check-point-cyberint-alerts.md @@ -21,6 +21,27 @@ This solution provides **1 data connector(s)**. Cyberint, a Check Point company, provides a Microsoft Sentinel integration to streamline critical Alerts and bring enriched threat intelligence from the Infinity External Risk Management solution into Microsoft Sentinel. This simplifies the process of tracking the status of tickets with automatic sync updates across systems. Using this new integration for Microsoft Sentinel, existing Cyberint and Microsoft Sentinel customers can easily pull logs based on Cyberint's findings into Microsoft Sentinel platform. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Check Point Cyberint API Key, Argos URL, and Customer Name**: The connector API key, Argos URL, and Customer Name are required + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Checkpoint Cyberint Alerts to Microsoft Sentinel** + +To enable the connector provide the required information below and click on Connect. +> +- **Argos URL**: Argos URL +- **API Token**: (password field) +- **Customer Name**: Customer Name +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `argsentdc_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/check-point-cyberint-ioc.md b/Tools/Solutions Analyzer/connector-docs/solutions/check-point-cyberint-ioc.md index 7ed27463705..9fc3ae5a7d7 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/check-point-cyberint-ioc.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/check-point-cyberint-ioc.md @@ -21,6 +21,26 @@ This solution provides **1 data connector(s)**. This is data connector for Check Point Cyberint IOC. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Check Point Cyberint API Key and Argos URL**: The connector API key and Argos URL are required + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Checkpoint Cyberint Alerts to Microsoft Sentinel** + +To enable the connector provide the required information below and click on Connect. 
+> +- **Argos URL**: Argos URL +- **API key**: API key +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `iocsent_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-aci.md b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-aci.md index 621290827c7..31fb79df425 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-aci.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-aci.md @@ -21,6 +21,59 @@ This solution provides **1 data connector(s)**. [Cisco Application Centric Infrastructure (ACI)](https://www.cisco.com/c/en/us/solutions/collateral/data-center-virtualization/application-centric-infrastructure/solution-overview-c22-741487.html) data connector provides the capability to ingest [Cisco ACI logs](https://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/all/syslog/guide/b_ACI_System_Messages_Guide/m-aci-system-messages-reference.html) into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoACIEvent**](https://aka.ms/sentinel-CiscoACI-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using Cisco ACI Release 1.x + +**1. 
Configure Cisco ACI system sending logs via Syslog to remote server where you will install the agent.** + +[Follow these steps](https://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/1-x/basic-config/b_ACI_Config_Guide/b_ACI_Config_Guide_chapter_010.html#d2933e4611a1635) to configure Syslog Destination, Destination Group, and Syslog Source. + +**2. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server to which the logs will be forwarded. + +> Logs on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**3. Check logs in Microsoft Sentinel** + +Open Log Analytics to check if the logs are received using the Syslog schema. + +>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table. 
+ | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-etd.md b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-etd.md index d99726195b2..5eee1aaa372 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-etd.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-etd.md @@ -20,6 +20,41 @@ This solution provides **1 data connector(s)**. The connector fetches data from ETD api for threat analysis +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Email Threat Defense API, API key, Client ID and Secret**: Ensure you have the API key, Client ID and Secret key. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the ETD API to pull its logs into Microsoft Sentinel. + +**Follow the deployment steps to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the ETD data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Cisco ETD data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CiscoETD-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Region**. +3. Enter the **WorkspaceID**, **SharedKey**, **ClientID**, **ClientSecret**, **ApiKey**, **Verdicts**, **ETD Region** +4. Click **Create** to deploy. + | | | |--------------------------|---| | **Tables Ingested** | `CiscoETD_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-firepower-estreamer.md b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-firepower-estreamer.md index 5d77ba0f304..991cc12ca0a 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-firepower-estreamer.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-firepower-estreamer.md @@ -25,6 +25,74 @@ This solution provides **2 data connector(s)**. eStreamer is a Client Server API designed for the Cisco Firepower NGFW Solution. The eStreamer client requests detailed event data on behalf of the SIEM or logging solution in the Common Event Format (CEF). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Install the Firepower eNcore client** + + Install and configure the Firepower eNcore eStreamer client, for more details see full install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html) +**1. Download the Firepower Connector from github** + + Download the latest version of the Firepower eNcore connector for Microsoft Sentinel [here](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector). If you plan on using python3 use the [python3 eStreamer connector](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector/tree/python3) + + **2. 
Create a pkcs12 file using the Azure/VM Ip Address** + + Create a pkcs12 certificate using the public IP of the VM instance in Firepower under System->Integration->eStreamer, for more information please see install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049443) + + **3. Test Connectivity between the Azure/VM Client and the FMC** + + Copy the pkcs12 file from the FMC to the Azure/VM instance and run the test utility (./encore.sh test) to ensure a connection can be established, for more details please see the setup [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049430) + + **4. Configure encore to stream data to the agent** + + Configure encore to stream data via TCP to the Microsoft Agent, this should be enabled by default, however, additional ports and streaming protocols can be configured depending on your network security posture, it is also possible to save the data to the file system, for more information please see [Configure Encore](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049433) + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. 
You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-ise.md b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-ise.md index 640a6483f27..a43da24da78 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-ise.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-ise.md @@ -21,6 +21,47 @@ This solution provides **1 data connector(s)**. The Cisco Identity Services Engine (ISE) data connector provides the capability to ingest [Cisco ISE](https://www.cisco.com/c/en/us/products/security/identity-services-engine/index.html) events into Microsoft Sentinel. It helps you gain visibility into what is happening in your network, such as who is connected, which applications are installed and running, and much more. Refer to [Cisco ISE logging mechanism documentation](https://www.cisco.com/c/en/us/td/docs/security/ise/2-7/admin_guide/b_ise_27_admin_guide/b_ISE_admin_27_maintain_monitor.html#reference_BAFBA5FA046A45938810A5DF04C00591) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-ciscoise-parser) to create the Kusto Functions alias, **CiscoISEEvent** + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. +- **Open Syslog settings** + +**3. Configure Cisco ISE Remote Syslog Collection Locations** + +[Follow these instructions](https://www.cisco.com/c/en/us/td/docs/security/ise/2-7/admin_guide/b_ise_27_admin_guide/b_ISE_admin_27_maintain_monitor.html#ID58) to configure remote syslog collection locations in your Cisco ISE deployment. 
+ | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-meraki-events-via-rest-api.md b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-meraki-events-via-rest-api.md index 4e18adbf472..221ab89c106 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-meraki-events-via-rest-api.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-meraki-events-via-rest-api.md @@ -31,6 +31,33 @@ The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily co 3. Audit Event +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Cisco Meraki REST API Key**: Enable API access in Cisco Meraki and generate API Key. Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information. +- **Cisco Meraki Organization Id**: Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Cisco Meraki events to Microsoft Sentinel** + +Currently, this connector allows to ingest events from the following [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) endpoint: + 1. 
[Get Organization Appliance Security Events](https://developer.cisco.com/meraki/api-latest/#!get-organization-appliance-security-events) +>This connector parses **IDS Alert** events into ASimNetworkSessionLogs Table and **File Scanned** events into ASimWebSessionLogs Table. + 2. [Get Organization Api Requests](https://developer.cisco.com/meraki/api-latest/#!get-organization-api-requests) +>This connector parses events into ASimWebSessionLogs Table. + 3. [Get Organization Configuration Changes](https://developer.cisco.com/meraki/api-latest/#!get-organization-configuration-changes) +>This connector parses events into ASimAuditEventLogs Table. +- **Organization Id**: OrganizationId +- **API Key**: (password field) +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `ASimAuditEventLogs` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-sd-wan.md b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-sd-wan.md index 2c0b4475340..db22b6f778a 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-sd-wan.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-sd-wan.md @@ -22,6 +22,127 @@ This solution provides **1 data connector(s)**. The Cisco Software Defined WAN(SD-WAN) data connector provides the capability to ingest [Cisco SD-WAN](https://www.cisco.com/c/en_in/solutions/enterprise-networks/sd-wan/index.html) Syslog and Netflow data into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**To ingest Cisco SD-WAN Syslog and Netflow data into Microsoft Sentinel follow the steps below.** + +**1. 
Steps to ingest Syslog data to Microsoft sentinel** + +Azure Monitor Agent will be used to collect the syslog data into Microsoft sentinel. For that, you first need to create an Azure Arc server for the VM from which syslog data will be sent. + +**1.1 Steps to Add Azure Arc Server** + +1. In Azure portal, go to Servers - Azure Arc and click on Add. +2. Select Generate Script under Add a single server section. A User can also generate scripts for Multiple Servers as well. +3. Review the information on the Prerequisites page, then select Next. +4. On the Resource details page, provide the subscription and resource group of the Microsoft Sentinel, Region, Operating system and Connectivity method. Then select Next. +5. On the Tags page, review the default Physical location tags suggested and enter a value, or specify one or more Custom tags to support your standards. Then select Next +6. Select Download to save the script file. +7. Now that you have generated the script, the next step is to run it on the server that you want to onboard to Azure Arc. +8. If you have Azure VM follow the steps mentioned in the [link](https://learn.microsoft.com/azure/azure-arc/servers/plan-evaluate-on-azure-virtual-machine) before running the script. +9. Run the script by the following command: `./<script-name>.sh` +10. After you install the agent and configure it to connect to Azure Arc-enabled servers, go to the Azure portal to verify that the server has successfully connected. View your machine in the Azure portal. +> **Reference link:** [https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm](https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm) + +**1.2 Steps to Create Data Collection Rule (DCR)** + +1. In Azure Portal search for Monitor. Under Settings, select Data Collection Rules and Select Create. +2. On the Basics panel, enter the Rule Name, Subscription, Resource group, Region and Platform Type. +3. Select Next: Resources. +4. 
Select Add resources. Use the filters to find the virtual machine that you'll use to collect logs. +5. Select the virtual machine. Select Apply. +6. Select Next: Collect and deliver. +7. Select Add data source. For Data source type, select Linux syslog. +8. For Minimum log level, leave the default values LOG_DEBUG. +9. Select Next: Destination. +10. Select Add destination and add Destination type, Subscription and Account or namespace. +11. Select Add data source. Select Next: Review + create. +12. Select Create. Wait for 20 minutes. In Microsoft Sentinel or Azure Monitor, verify that the Azure Monitor agent is running on your VM. +> **Reference link:** [https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent](https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent) + +**2. Steps to ingest Netflow data to Microsoft sentinel** + +To Ingest Netflow data into Microsoft sentinel, Filebeat and Logstash need to be installed and configured on the VM. After the configuration, the VM will be able to receive netflow data on the configured port and that data will be ingested into the workspace of Microsoft sentinel. + +**2.1 Install filebeat and logstash** + +1. For the installation of filebeat and logstash using apt refer to this doc: + 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html). + 2. Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html). +2. For the installation of filebeat and logstash for RedHat based Linux (yum) steps are as follows: + 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum). + 2. 
Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum) + +**2.2 Configure Filebeat to send events to Logstash** + +1. Edit filebeat.yml file: `vi /etc/filebeat/filebeat.yml` +2. Comment out the Elasticsearch Output section. +3. Uncomment Logstash Output section (Uncomment out only these two lines)- + output.logstash + hosts: ["localhost:5044"] +3. In the Logstash Output section, if you want to send the data other than the default port i.e. 5044 port, then replace the port number in the hosts field. (Note: This port should be added in the conf file, while configuring logstash.) +4. In the 'filebeat.inputs' section comment out existing configuration and add the following configuration: + - type: netflow + max_message_size: 10KiB + host: "0.0.0.0:2055" + protocols: [ v5, v9, ipfix ] + expiration_timeout: 30m + queue_size: 8192 + custom_definitions: + - /etc/filebeat/custom.yml + detect_sequence_reset: true + enabled: true +6. In the Filebeat inputs section, if you want to receive the data other than the default port i.e. 2055 port, then replace the port number in the host field. +7. Add the provided [custom.yml](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/custom.yml) file inside the /etc/filebeat/ directory. +8. Open the filebeat input and output port in the firewall. + 1. Run command: `firewall-cmd --zone=public --permanent --add-port=2055/udp` + 2. Run command: `firewall-cmd --zone=public --permanent --add-port=5044/udp` +> Note: if a custom port is added for filebeat input/output, then open that port in the firewall. + +**2.3 Configure Logstash to send events to Microsoft Sentinel** + +1. Install the Azure Log Analytics plugin: + 1. Run Command: `sudo /usr/share/logstash/bin/logstash-plugin install microsoft-logstash-output-azure-loganalytics` +3. 
Store the Log Analytics workspace key in the Logstash key store. The workspace key can be found in Azure Portal under Log analytic workspace > Select workspace > Under Settings select Agent > Log Analytics agent instructions. +4. Copy the Primary key and run the following commands: + 1. `sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash create LogAnalyticsKey` + 2. `sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash add LogAnalyticsKey` +5. Create the configuration file /etc/logstash/cisco-netflow-to-sentinel.conf: + input { + beats { + port => #(Enter output port number which has been configured during filebeat configuration i.e. filebeat.yml file .) + } + } + output { + microsoft-logstash-output-azure-loganalytics { + workspace_id => "" + workspace_key => "${LogAnalyticsKey}" + custom_log_table_name => "CiscoSDWANNetflow" + } + } +> Note: If table is not present in Microsoft sentinel, then it will create a new table in sentinel. + +**2.4 Run Filebeat:** + +1. Open a terminal and run the command: +> `systemctl start filebeat` +2. This command will start running filebeat in the background. To see the logs stop the filebeat (`systemctl stop filebeat`) then run the following command: +> `filebeat run -e` + +**2.5 Run Logstash:** + +1. In another terminal run the command: +> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf &` +2. This command will start running the logstash in the background. 
To see the logs of logstash kill the above process and run the following command : +> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf` + | | | |--------------------------|---| | **Tables Ingested** | `CiscoSDWANNetflow_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-secure-cloud-analytics.md b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-secure-cloud-analytics.md index f28a27b2c03..6e647611fdc 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-secure-cloud-analytics.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-secure-cloud-analytics.md @@ -21,6 +21,64 @@ This solution provides **1 data connector(s)**. The [Cisco Secure Cloud Analytics](https://www.cisco.com/c/en/us/products/security/stealthwatch/index.html) data connector provides the capability to ingest [Cisco Secure Cloud Analytics events](https://www.cisco.com/c/dam/en/us/td/docs/security/stealthwatch/management_console/securit_events_alarm_categories/7_4_2_Security_Events_and_Alarm_Categories_DV_2_1.pdf) into Microsoft Sentinel. Refer to [Cisco Secure Cloud Analytics documentation](https://www.cisco.com/c/dam/en/us/td/docs/security/stealthwatch/system_installation_configuration/7_5_0_System_Configuration_Guide_DV_1_3.pdf) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**StealthwatchEvent**](https://aka.ms/sentinel-stealthwatch-parser) which is deployed with the Microsoft Sentinel Solution. 
+ +>**NOTE:** This data connector has been developed using Cisco Secure Cloud Analytics version 7.3.2 + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server where the Cisco Secure Cloud Analytics logs are forwarded. + +> Logs from Cisco Secure Cloud Analytics Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure Cisco Secure Cloud Analytics event forwarding** + +Follow the configuration steps below to get Cisco Secure Cloud Analytics logs into Microsoft Sentinel. +1. Log in to the Stealthwatch Management Console (SMC) as an administrator. +2. In the menu bar, click **Configuration** **>** **Response Management**. +3. From the **Actions** section in the **Response Management** menu, click **Add > Syslog Message**. +4. In the Add Syslog Message Action window, configure parameters. +5. 
Enter the following custom format: +|Lancope|Stealthwatch|7.3|{alarm_type_id}|0x7C|src={source_ip}|dst={target_ip}|dstPort={port}|proto={protocol}|msg={alarm_type_description}|fullmessage={details}|start={start_active_time}|end={end_active_time}|cat={alarm_category_name}|alarmID={alarm_id}|sourceHG={source_host_group_names}|targetHG={target_host_group_names}|sourceHostSnapshot={source_url}|targetHostSnapshot={target_url}|flowCollectorName={device_name}|flowCollectorIP={device_ip}|domain={domain_name}|exporterName={exporter_hostname}|exporterIPAddress={exporter_ip}|exporterInfo={exporter_label}|targetUser={target_username}|targetHostname={target_hostname}|sourceUser={source_username}|alarmStatus={alarm_status}|alarmSev={alarm_severity_name} + +6. Select the custom format from the list and click **OK** +7. Click **Response Management > Rules**. +8. Click **Add** and select **Host Alarm**. +9. Provide a rule name in the **Name** field. +10. Create rules by selecting values from the Type and Options menus. To add more rules, click the ellipsis icon. For a Host Alarm, combine as many possible types in a statement as possible. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-secure-endpoint.md b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-secure-endpoint.md index d1214a4eb15..a7ff7fac031 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-secure-endpoint.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-secure-endpoint.md @@ -26,6 +26,51 @@ This solution provides **2 data connector(s)**. The Cisco Secure Endpoint (formerly AMP for Endpoints) data connector provides the capability to ingest Cisco Secure Endpoint [audit logs](https://developer.cisco.com/docs/secure-endpoint/auditlog/) and [events](https://developer.cisco.com/docs/secure-endpoint/v1-api-reference-event/) into Microsoft Sentinel. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Cisco Secure Endpoint API Credentials/Regions**: To create API Credentials and to understand the regions, follow the document link provided here. [Click here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/README.md). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Cisco Secure Endpoint to Microsoft Sentinel** + +To ingest data from Cisco Secure Endpoint to Microsoft Sentinel, you have to click on Add Account button below, then you get a pop up to fill the details like Email, Organization, Client ID, API Key and Region, provide the required information and click on Connect. You can see the connected organizations/emails in the below grid. +> +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Organization** +- **Email** +- **Endpoint** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add Account** + +*Add Account* + +When you click the "Add Account" button in the portal, a configuration form will open. 
You'll need to provide: + +- **Cisco Secure Endpoint Email** (optional): Enter your Cisco Email +- **Cisco Secure Endpoint Organization** (optional): Enter the name of your Organization +- **Cisco Secure Endpoint Client ID** (optional): Enter your Client ID +- **Cisco Secure Endpoint API Key** (optional): Enter your API Key +- **Cisco Secure Endpoint Region** (optional): Enter the region you want to connect + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `CiscoSecureEndpointAuditLogsV2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-ucs.md b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-ucs.md index 30bdf8e0ec0..5614fd1d21c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cisco-ucs.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cisco-ucs.md @@ -21,6 +21,49 @@ This solution provides **1 data connector(s)**. The [Cisco Unified Computing System (UCS)](https://www.cisco.com/c/en/us/products/servers-unified-computing/index.html) connector allows you to easily connect your Cisco UCS logs with Microsoft Sentinel This gives you more insight into your organization's network and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **Cisco UCS**: must be configured to export logs via Syslog + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CiscoUCS and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20UCS/Parsers/CiscoUCS.yaml). The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the Cisco UCS** + +[Follow these instructions](https://www.cisco.com/c/en/us/support/docs/servers-unified-computing/ucs-manager/110265-setup-syslog-for-ucs.html#configsremotesyslog) to configure the Cisco UCS to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. 
+ | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ciscoasa.md b/Tools/Solutions Analyzer/connector-docs/solutions/ciscoasa.md index c29b60c1643..2244d514f31 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ciscoasa.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ciscoasa.md @@ -13,31 +13,8 @@ ## Data Connectors -This solution provides **2 data connector(s)**. +**This solution does not include data connectors.** -### [Cisco ASA via Legacy Agent](../connectors/ciscoasa.md) - -**Publisher:** Cisco - -### [Cisco ASA/FTD via AMA](../connectors/ciscoasaama.md) - -**Publisher:** Microsoft - -The Cisco ASA firewall connector allows you to easily connect your Cisco ASA logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. - -| | | -|--------------------------|---| -| **Tables Ingested** | `CommonSecurityLog` | -| **Connector Definition Files** | [template_CiscoAsaAma.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoASA/Data%20Connectors/template_CiscoAsaAma.JSON) | - -[→ View full connector details](../connectors/ciscoasaama.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `CommonSecurityLog` | [Cisco ASA via Legacy Agent](../connectors/ciscoasa.md), [Cisco ASA/FTD via AMA](../connectors/ciscoasaama.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. 
[← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ciscoduosecurity.md b/Tools/Solutions Analyzer/connector-docs/solutions/ciscoduosecurity.md index b8bab58a2ba..b6070d700aa 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ciscoduosecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ciscoduosecurity.md @@ -21,6 +21,74 @@ This solution provides **1 data connector(s)**. The Cisco Duo Security data connector provides the capability to ingest [authentication logs](https://duo.com/docs/adminapi#authentication-logs), [administrator logs](https://duo.com/docs/adminapi#administrator-logs), [telephony logs](https://duo.com/docs/adminapi#telephony-logs), [offline enrollment logs](https://duo.com/docs/adminapi#offline-enrollment-logs) and [Trust Monitor events](https://duo.com/docs/adminapi#trust-monitor) into Microsoft Sentinel using the Cisco Duo Admin API. Refer to [API documentation](https://duo.com/docs/adminapi) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Cisco Duo API credentials**: Cisco Duo API credentials with permission *Grant read log* is required for Cisco Duo API. See the [documentation](https://duo.com/docs/adminapi#first-steps) to learn more about creating Cisco Duo API credentials. 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Cisco Duo API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoDuo**](https://aka.ms/sentinel-CiscoDuoSecurity-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Obtaining Cisco Duo Admin API credentials** + +1. Follow [the instructions](https://duo.com/docs/adminapi#first-steps) to obtain **integration key**, **secret key**, and **API hostname**. Use **Grant read log** permission in the 4th step of [the instructions](https://duo.com/docs/adminapi#first-steps). + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CiscoDuoSecurity-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CiscoDuoSecurity-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Cisco Duo Integration Key**, **Cisco Duo Secret Key**, **Cisco Duo API Hostname**, **Cisco Duo Log Types**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the [Azure Function App](https://aka.ms/sentinel-CiscoDuoSecurity-functionapp) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. In the Function App, select the Function App Name and select **Configuration**. 
+2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + CISCO_DUO_INTEGRATION_KEY + CISCO_DUO_SECRET_KEY + CISCO_DUO_API_HOSTNAME + CISCO_DUO_LOG_TYPES + WORKSPACE_ID + SHARED_KEY + logAnalyticsUri (Optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `CiscoDuo_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ciscomeraki.md b/Tools/Solutions Analyzer/connector-docs/solutions/ciscomeraki.md index 952150723ea..fc683ea5bef 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ciscomeraki.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ciscomeraki.md @@ -31,6 +31,26 @@ The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily co 1. Network Session +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Cisco Meraki REST API Key**: Enable API access in Cisco Meraki and generate API Key. Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information. +- **Cisco Meraki Organization Id**: Obtain your Cisco Meraki organization id to fetch security events. 
Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Cisco Meraki Security Events to Microsoft Sentinel** + +To enable Cisco Meraki Security Events for Microsoft Sentinel, provide the required information below and click on Connect. +>This data connector depends on a parser based on a Kusto Function to render the content. [**CiscoMeraki**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/CiscoMeraki/Parsers/CiscoMeraki.txt) Parser currently support only "**IDS Alert**" and "**File Scanned**" Events. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + | | | |--------------------------|---| | **Tables Ingested** | `CiscoMerakiNativePoller_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ciscoseg.md b/Tools/Solutions Analyzer/connector-docs/solutions/ciscoseg.md index d8577e79eb7..5a1347e08a2 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ciscoseg.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ciscoseg.md @@ -25,6 +25,65 @@ This solution provides **2 data connector(s)**. The [Cisco Secure Email Gateway (SEG)](https://www.cisco.com/c/en/us/products/security/email-security/index.html) data connector provides the capability to ingest [Cisco SEG Consolidated Event Logs](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1061902) into Microsoft Sentinel. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoSEGEvent**](https://aka.ms/sentinel-CiscoSEG-parser) which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. 
Forward Common Event Format (CEF) logs to Syslog agent** + + Follow these steps to configure Cisco Secure Email Gateway to forward logs via syslog: + + Configure [Log Subscription](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1134718) + +>**NOTE:** Select **Consolidated Event Logs** in Log Type field. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ciscoumbrella.md b/Tools/Solutions Analyzer/connector-docs/solutions/ciscoumbrella.md index 567e4f4e946..192a0433c19 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ciscoumbrella.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ciscoumbrella.md @@ -29,6 +29,90 @@ The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbre **NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Amazon S3 REST API Credentials/permissions**: **AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API. 
+- **Virtual Network permissions (for private access)**: For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning) + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App. + +>**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**. 
+ +**STEP 1 - Network Prerequisites for Private Access** + +>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met: +> - **Virtual Network**: An existing Virtual Network (VNet) must be available +> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration +> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI: +> - Azure Portal: Go to Virtual networks → Select your VNet → Subnets → Select subnet → Delegate subnet to service → Choose **Microsoft.Web/serverFarms** +> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms` +> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet + +**STEP 2 - Configuration of the Cisco Umbrella logs collection** + +[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials. + +**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions** + +>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey** +4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms) +**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value +5. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +6. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development. + +1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer. +2. 
Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + WorkspaceID + WorkspaceKey + S3Bucket + AWSAccessKeyId + AWSSecretAccessKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://<WorkspaceID>.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `Cisco_Umbrella_audit_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ciscowsa.md b/Tools/Solutions Analyzer/connector-docs/solutions/ciscowsa.md index f10b2f39f3e..4ec57b682c0 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ciscowsa.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ciscowsa.md @@ -21,6 +21,61 @@ This solution provides **1 data connector(s)**. [Cisco Web Security Appliance (WSA)](https://www.cisco.com/c/en/us/products/security/web-security-appliance/index.html) data connector provides the capability to ingest [Cisco WSA Access Logs](https://www.cisco.com/c/en/us/td/docs/security/wsa/wsa_14-0/User-Guide/b_WSA_UserGuide_14_0/b_WSA_UserGuide_11_7_chapter_010101.html) into Microsoft Sentinel. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoWSAEvent**](https://aka.ms/sentinel-CiscoWSA-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using AsyncOS 14.0 for Cisco Web Security Appliance + +**1. Configure Cisco Web Security Appliance to forward logs via Syslog to remote server where you will install the agent.** + +[Follow these steps](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1134718) to configure Cisco Web Security Appliance to forward logs via Syslog + +>**NOTE:** Select **Syslog Push** as a Retrieval Method. + +**2. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server to which the logs will be forwarded. + +> Logs on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. 
+ - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**3. Check logs in Microsoft Sentinel** + +Open Log Analytics to check if the logs are received using the Syslog schema. + +>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/citrix-adc.md b/Tools/Solutions Analyzer/connector-docs/solutions/citrix-adc.md index 880b1393be9..e28bf9742bc 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/citrix-adc.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/citrix-adc.md @@ -21,6 +21,70 @@ This solution provides **1 data connector(s)**. The [Citrix ADC (former NetScaler)](https://www.citrix.com/products/citrix-adc/) data connector provides the capability to ingest Citrix ADC logs into Microsoft Sentinel. If you want to ingest Citrix WAF logs into Microsoft Sentinel, refer this [documentation](https://learn.microsoft.com/azure/sentinel/data-connectors/citrix-waf-web-app-firewall) +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** 1. 
This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CitrixADCEvent and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20ADC/Parsers/CitrixADCEvent.yaml), this function maps Citrix ADC (former NetScaler) events to Advanced Security Information Model [ASIM](https://docs.microsoft.com/azure/sentinel/normalization). The function usually takes 10-15 minutes to activate after solution installation/update. + +>**NOTE:** 2. This parser requires a watchlist named **`Sources_by_SourceType`** + +> i. If you don't have watchlist already created, please click [here](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FASIM%2Fdeploy%2FWatchlists%2FASimSourceType.json) to create. + +> ii. Open watchlist **`Sources_by_SourceType`** and add entries for this data source. + +> iii. The SourceType value for CitrixADC is **`CitrixADC`**. + +> You can refer [this](https://learn.microsoft.com/en-us/azure/sentinel/normalization-manage-parsers?WT.mc_id=Portal-fx#configure-the-sources-relevant-to-a-source-specific-parser) documentation for more details + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. 
+ - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure Citrix ADC to forward logs via Syslog** + +3.1 Navigate to **Configuration tab > System > Auditing > Syslog > Servers tab** + + 3.2 Specify **Syslog action name**. + + 3.3 Set IP address of remote Syslog server and port. + + 3.4 Set **Transport type** as **TCP** or **UDP** depending on your remote Syslog server configuration. + + 3.5 You can refer Citrix ADC (former NetScaler) [documentation](https://docs.netscaler.com/) for more details. + +**4. Check logs in Microsoft Sentinel** + +Open Log Analytics to check if the logs are received using the Syslog schema. + +>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/citrix-analytics-for-security.md b/Tools/Solutions Analyzer/connector-docs/solutions/citrix-analytics-for-security.md index 372c9be4910..aa4788bc8f1 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/citrix-analytics-for-security.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/citrix-analytics-for-security.md @@ -21,6 +21,25 @@ This solution provides **1 data connector(s)**. Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Licensing**: Entitlements to Citrix Security Analytics in Citrix Cloud. Please review [Citrix Tool License Agreement.](https://aka.ms/sentinel-citrixanalyticslicense-readme) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +To get access to this capability and the configuration steps on Citrix Analytics, please visit: [Connect Citrix to Microsoft Sentinel.](https://aka.ms/Sentinel-Citrix-Connector)​ +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `CitrixAnalytics_indicatorEventDetails_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/citrix-web-app-firewall.md b/Tools/Solutions Analyzer/connector-docs/solutions/citrix-web-app-firewall.md index b72361eaf88..c8602cd5acd 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/citrix-web-app-firewall.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/citrix-web-app-firewall.md @@ -29,6 +29,69 @@ This solution provides **2 data connector(s)**. Citrix WAF supports Common Event Format (CEF) which is an industry standard format on top of Syslog messages . 
By connecting Citrix WAF CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. 
Forward Common Event Format (CEF) logs to Syslog agent** + + Configure Citrix WAF to send Syslog messages in CEF format to the proxy machine using the steps below. + +1. Follow [this guide](https://support.citrix.com/article/CTX234174) to configure WAF. + +2. Follow [this guide](https://support.citrix.com/article/CTX136146) to configure CEF logs. + +3. Follow [this guide](https://docs.citrix.com/en-us/citrix-adc/13/system/audit-logging/configuring-audit-logging.html) to forward the logs to the proxy. Make sure to send the logs to port 514 TCP on the Linux machine's IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/claroty-xdome.md b/Tools/Solutions Analyzer/connector-docs/solutions/claroty-xdome.md index 6fc469ca77c..a293b9e6e6a 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/claroty-xdome.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/claroty-xdome.md @@ -21,6 +21,59 @@ This solution provides **1 data connector(s)**. [Claroty](https://claroty.com/) xDome delivers comprehensive security and alert management capabilities for healthcare and industrial network environments. It is designed to map multiple source types, identify the collected data, and integrate it into Microsoft Sentinel data models. This results in the ability to monitor all potential threats in your healthcare and industrial environments in one location, leading to more effective security monitoring and a stronger security posture. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. 
+ +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Configure the Claroty xDome - Microsoft Sentinel integration to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. 
You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/claroty.md b/Tools/Solutions Analyzer/connector-docs/solutions/claroty.md index 17bacea0c5c..7da25121389 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/claroty.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/claroty.md @@ -25,6 +25,69 @@ This solution provides **2 data connector(s)**. The [Claroty](https://claroty.com/) data connector provides the capability to ingest [Continuous Threat Detection](https://claroty.com/resources/datasheets/continuous-threat-detection) and [Secure Remote Access](https://claroty.com/industrial-cybersecurity/sra) events into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ClarotyEvent**](https://aka.ms/sentinel-claroty-parser) which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Configure Claroty to send logs using CEF** + + Configure log forwarding using CEF: + +1. Navigate to the **Syslog** section of the Configuration menu. + +2. Select **+Add**. + +3. In the **Add New Syslog Dialog** specify Remote Server **IP**, **Port**, **Protocol** and select **Message Format** - **CEF**. + +4. Choose **Save** to exit the **Add Syslog dialog**. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. 
You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cloudflare-ccf.md b/Tools/Solutions Analyzer/connector-docs/solutions/cloudflare-ccf.md index 30078a706ea..40671c3645f 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cloudflare-ccf.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cloudflare-ccf.md @@ -21,6 +21,32 @@ This solution provides **1 data connector(s)**. The Cloudflare data connector provides the capability to ingest Cloudflare logs into Microsoft Sentinel using the Cloudflare Logpush and Azure Blob Storage. Refer to [Cloudflare documentation](https://developers.cloudflare.com/logs/about/)for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Create a storage account and a container**: Before setting up logpush in Cloudflare, first create a storage account and a container in Microsoft Azure. Use [this guide](https://learn.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction) to know more about Container and Blob. Follow the steps in the [documentation](https://learn.microsoft.com/en-us/azure/storage/common/storage-account-create?tabs=azure-portal) to create an Azure Storage account. +- **Generate a Blob SAS URL**: Create and Write permissions are required. 
Refer to the [documentation](https://learn.microsoft.com/en-us/azure/ai-services/translator/document-translation/how-to-guides/create-sas-tokens?tabs=Containers) to know more about Blob SAS token and URL. +- **Collecting logs from Cloudflare to your Blob container**: Follow the steps in the [documentation](https://developers.cloudflare.com/logs/get-started/enable-destinations/azure/) for collecting logs from Cloudflare to your Blob container. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Cloudflare Logs to Microsoft Sentinel** + +To enable Cloudflare logs for Microsoft Sentinel, provide the required information below and click on Connect. +> +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `ServicePrincipalIDTextBox_test`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. +- **The Blob container's URL you want to collect data from** +- **The Blob container's storage account resource group name** +- **The Blob container's storage account location** +- **The Blob container's storage account subscription id** +- **The event grid topic name of the blob container's storage account if it exists; otherwise keep empty.** +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `CloudflareV2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cloudflare.md b/Tools/Solutions Analyzer/connector-docs/solutions/cloudflare.md index ff277347557..9f811580072 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cloudflare.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cloudflare.md @@ -25,6 +25,32 @@ This solution provides **2 data connector(s)**. 
The Cloudflare data connector provides the capability to ingest Cloudflare logs into Microsoft Sentinel using the Cloudflare Logpush and Azure Blob Storage. Refer to [Cloudflare documentation](https://developers.cloudflare.com/logs/about/)for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Create a storage account and a container**: Before setting up logpush in Cloudflare, first create a storage account and a container in Microsoft Azure. Use [this guide](https://learn.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction) to know more about Container and Blob. Follow the steps in the [documentation](https://learn.microsoft.com/en-us/azure/storage/common/storage-account-create?tabs=azure-portal) to create an Azure Storage account. +- **Generate a Blob SAS URL**: Create and Write permissions are required. Refer the [documentation](https://learn.microsoft.com/en-us/azure/ai-services/translator/document-translation/how-to-guides/create-sas-tokens?tabs=Containers) to know more about Blob SAS token and url. +- **Collecting logs from Cloudflare to your Blob container**: Follow the steps in the [documentation](https://developers.cloudflare.com/logs/get-started/enable-destinations/azure/) for collecting logs from Cloudflare to your Blob container. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Cloudflare Logs to Microsoft Sentinel** + +To enable Cloudflare logs for Microsoft Sentinel, provide the required information below and click on Connect. +> +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `ServicePrincipalIDTextBox_test`. 
Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. +- **The Blob container's URL you want to collect data from** +- **The Blob container's storage account resource group name** +- **The Blob container's storage account location** +- **The Blob container's storage account subscription id** +- **The event grid topic name of the blob container's storage account if it exists; otherwise keep empty.** +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `CloudflareV2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cofenseintelligence.md b/Tools/Solutions Analyzer/connector-docs/solutions/cofenseintelligence.md index 173c58592bd..18402e40a9d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cofenseintelligence.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cofenseintelligence.md @@ -60,6 +60,180 @@ The [Cofense-Intelligence](https://cofense.com/product-services/phishing-intelli > https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide + +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Azure Active Directory and assign the Contributor role to the app in the resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **REST API Credentials/permissions**: **Cofense Username** and **Password** is required. See the documentation to learn more about API on the [Rest API reference](https://www.threathq.com/docs/rest_api_reference.html) +- **Microsoft Defender for Endpoints**: **Microsoft Defender for Endpoints License** is required for SentinelToDefender function. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Cofense Intelligence APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Azure Active Directory**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. 
Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of CofenseIntelligence Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application** + + Sometimes called an application password, a client secret is a string value required for the execution of CofenseIntelligence Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseIntelligence Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application** + + Follow the steps in this section to assign the role: + 1. In the Azure portal, Go to **Resource Group** and select your resource group. + 2. Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. 
Select **Contributor** as role and click on next. + 5. In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application** + + Follow the steps in this section to assign the permissions: + 1. In the Azure portal, in **App registrations**, select **your application**. + 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**. + 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**. + 4. Select **Grant consent**. + +> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide) + +**STEP 5 - Steps to create/get Credentials for the Cofense Intelligence account** + + Follow the steps in this section to create/get **Cofense Username** and **Password**: + 1. Login to https://threathq.com and go to the **Settings menu** on the left navigation bar. + 2. Choose the API Tokens tab and select **Add a New Token** + 3. Make sure to save the **password**, as it will not be accessible again. 
+ +**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Cofense Intelligence Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**7. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Cofense connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseIntelligence-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Function Name + Workspace ID + Workspace Key + Cofense BaseURL (https:///) + Cofense Username + Cofense Password + Azure Client ID + Azure Client Secret + Azure Tenant ID + Azure Resource Group Name + Azure Workspace Name + Azure Subscription ID + RequireProxy + Proxy Username (optional) + Proxy Password (optional) + Proxy URL (optional) + Proxy Port (optional) + LogLevel (optional) + Malware_Data_Table_name + SendCofenseIndicatorToDefender + Schedule +4. Click on **Review+Create**. +5. Then after validation click on **Create** to deploy. + +**8. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Cofense Intelligence Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. 
Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseIntelligence-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX). + + e. **Select a runtime:** Choose Python 3.11 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. 
Add each of the following application settings individually, with their respective values (case-sensitive): + Workspace ID + Workspace Key + Cofense BaseURL (https:///) + Cofense Username + Cofense Password + Azure Client ID + Azure Client Secret + Azure Tenant ID + Azure Resource Group Name + Azure Workspace Name + Function App Name + Azure Subscription ID + RequireProxy + Proxy Username (optional) + Proxy Password (optional) + Proxy URL (optional) + Proxy Port (optional) + LogLevel (optional) + Malware_Data_Table_name + SendCofenseIndicatorToDefender + Schedule +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `Malware_Data_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cofensetriage.md b/Tools/Solutions Analyzer/connector-docs/solutions/cofensetriage.md index 7b49e3a348c..55dffaf4fb1 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cofensetriage.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cofensetriage.md @@ -58,6 +58,181 @@ The [Cofense-Triage](https://cofense.com/product-services/cofense-triage/) data > https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Cofense Client ID** and **Client Secret** is required. See the documentation to learn more about API on the `https:///docs/api/v2/index.html` +- **Microsoft Defender for Endpoints**: **Microsoft Defender for Endpoints License** is required for IndicatorCreatorToDefender function. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Cofense APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and pulls Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. 
Search for and select **Azure Active Directory**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of CofenseTriage Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application** + + Sometimes called an application password, a client secret is a string value required for the execution of CofenseTriage Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseTriage Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application** + + Follow the steps in this section to assign the role: + 1. 
In the Azure portal, Go to **Resource Group** and select your resource group. + 2. Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. Select **Contributor** as role and click on next. + 5. In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application** + + Follow the steps in this section to assign the permissions: + 1. In the Azure portal, in **App registrations**, select **your application**. + 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**. + 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**. + 4. Select **Grant consent**. + +> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide) + +**STEP 5 - Steps to create/get Credentials for the Cofense Triage account** + + Follow the steps in this section to create/get **Cofense Client ID** and **Client Secret**: + 1. Go to **Administration > API Management > Version 2 tab > Applications** + 2. Click on **New Application** + 3. Add the required information and click on **submit**. 
+ +**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Cofense Triage Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Cofense API Authorization Key(s). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**7. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Cofense connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseTriage-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Function Name + Workspace ID + Workspace Key + Cofense URL (https:///) + Cofense Client ID + Cofense Client Secret + Azure Client ID + Azure Client Secret + Azure Tenant ID + Azure Resource Group Name + Azure Workspace Name + Azure Subscription ID + Threat Level + Proxy Username (optional) + Proxy Password (optional) + Proxy URL (optional) + Proxy Port (optional) + Throttle Limit for Non-Cofense Indicators (optional) + LogLevel (optional) + Reports Table Name + Schedule +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**8. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Cofense Triage Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1.
Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseThreatIndicatorsAPI-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX). + + e. **Select a runtime:** Choose Python 3.11 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. 
Add each of the following application settings individually, with their respective values (case-sensitive): + Workspace ID + Workspace Key + Cofense URL (https:///) + Cofense Client ID + Cofense Client Secret + Azure Client ID + Azure Client Secret + Azure Tenant ID + Azure Resource Group Name + Azure Workspace Name + Azure Subscription ID + Threat Level + Proxy Username (optional) + Proxy Password (optional) + Proxy URL (optional) + Proxy Port (optional) + Throttle Limit for Non-Cofense Indicators (optional) + LogLevel (optional) + Reports Table Name + Schedule + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `Cofense_Triage_failed_indicators_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cognni.md b/Tools/Solutions Analyzer/connector-docs/solutions/cognni.md index a5765b86620..8415b4d9a3d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cognni.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cognni.md @@ -21,6 +21,30 @@ This solution provides **1 data connector(s)**. The Cognni connector offers a quick and simple integration with Microsoft Sentinel. You can use Cognni to autonomously map your previously unclassified important information and detect related incidents. This allows you to recognize risks to your important information, understand the severity of the incidents, and investigate the details you need to remediate, fast enough to make a difference. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect to Cognni** + +1. Go to [Cognni integrations page](https://intelligence.cognni.ai/integrations) +2. Click **'Connect'** on the 'Microsoft Sentinel' box +3. Copy and paste **'workspaceId'** and **'sharedKey'** (from below) to the related fields on Cognni's integrations screen +4. Click the **'Connect'** button to complete the configuration. + Soon, all your Cognni-detected incidents will be forwarded here (into Microsoft Sentinel) + +Not a Cognni user? [Join us](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/shieldox.appsource_freetrial) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Shared Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `CognniIncidents_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cognyteluminar.md b/Tools/Solutions Analyzer/connector-docs/solutions/cognyteluminar.md index c665eda40d9..d7647e6a663 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cognyteluminar.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cognyteluminar.md @@ -21,6 +21,87 @@ This solution provides **1 data connector(s)**.
Luminar IOCs and Leaked Credentials connector allows integration of intelligence-based IOC data and customer-related leaked records identified by Luminar. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Azure Active Directory and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Luminar Client ID**, **Luminar Client Secret** and **Luminar Account ID** are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Cognyte Luminar API to pull Luminar IOCs and Leaked Credentials into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault.
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**1. Option 1 - Azure Resource Manager (ARM) Template for Flex Consumption Plan** + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CognyteLuminar-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Application ID**, **Tenant ID**,**Client Secret**, **Luminar API Client ID**, **Luminar API Account ID**, **Luminar API Client Secret**, **Luminar Initial Fetch Date**, **TimeInterval** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**2. Option 1 - Azure Resource Manager (ARM) Template for Premium Plan** + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CognyteLuminar-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Application ID**, **Tenant ID**,**Client Secret**, **Luminar API Client ID**, **Luminar API Account ID**, **Luminar API Client Secret**, **Luminar Initial Fetch Date**, **TimeInterval** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. 
Click **Purchase** to deploy. + +**3. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Cognyte Luminar data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> NOTE: You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-CognyteLuminar-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CognyteLuminarXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7.
Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + Application ID + Tenant ID + Client Secret + Luminar API Client ID + Luminar API Account ID + Luminar API Client Secret + Luminar Initial Fetch Date + TimeInterval - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us` +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `ThreatIntelligenceIndicator` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cohesitysecurity.md b/Tools/Solutions Analyzer/connector-docs/solutions/cohesitysecurity.md index af94ced25a6..917a976fac7 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cohesitysecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cohesitysecurity.md @@ -21,6 +21,42 @@ This solution provides **1 data connector(s)**. The Cohesity function apps provide the ability to ingest Cohesity Datahawk ransomware alerts into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key).
+ +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Azure Blob Storage connection string and container name**: Azure Blob Storage connection string and container name + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions that connect to the Azure Blob Storage and KeyVault. This might result in additional costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/), [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) and [Azure KeyVault pricing page](https://azure.microsoft.com/pricing/details/key-vault/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Get a Cohesity DataHawk API key (see troubleshooting [instruction 1](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/CohesitySecurity/Data%20Connectors/Helios2Sentinel/IncidentProducer))** + +**STEP 2 - Register Azure app ([link](https://portal.azure.com/#view/Microsoft_AAD_IAM/ActiveDirectoryMenuBlade/~/RegisteredApps)) and save Application (client) ID, Directory (tenant) ID, and Secret Value ([instructions](https://learn.microsoft.com/en-us/azure/healthcare-apis/register-application)). Grant it Azure Storage (user_impersonation) permission. 
Also, assign the 'Microsoft Sentinel Contributor' role to the application in the appropriate subscription.** + +**STEP 3 - Deploy the connector and the associated Azure Functions**. + +**4. Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Cohesity data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Cohesity-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the parameters that you created at the previous steps +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + | | | |--------------------------|---| | **Tables Ingested** | `Cohesity_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/common-event-format.md b/Tools/Solutions Analyzer/connector-docs/solutions/common-event-format.md index 3d514592eff..c6d8c95c8eb 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/common-event-format.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/common-event-format.md @@ -13,31 +13,8 @@ ## Data Connectors -This solution provides **2 data connector(s)**. +**This solution does not include data connectors.** -### [Common Event Format (CEF)](../connectors/cef.md) - -**Publisher:** Any - -### [Common Event Format (CEF) via AMA](../connectors/cefama.md) - -**Publisher:** Microsoft - -Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by many security vendors to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223547&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -| | | -|--------------------------|---| -| **Tables Ingested** | `CommonSecurityLog` | -| **Connector Definition Files** | [CEF%20AMA.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Common%20Event%20Format/Data%20Connectors/CEF%20AMA.JSON) | - -[→ View full connector details](../connectors/cefama.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `CommonSecurityLog` | [Common Event Format (CEF)](../connectors/cef.md), [Common Event Format (CEF) via AMA](../connectors/cefama.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/commvault-security-iq.md b/Tools/Solutions Analyzer/connector-docs/solutions/commvault-security-iq.md index 6d8ab274175..2d6dcd965f4 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/commvault-security-iq.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/commvault-security-iq.md @@ -21,6 +21,48 @@ This solution provides **1 data connector(s)**. This Azure Function enables Commvault users to ingest alerts/events into their Microsoft Sentinel instance. With Analytic Rules,Microsoft Sentinel can automatically create Microsoft Sentinel incidents from incoming events and logs. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Commvault Environment Endpoint URL**: Make sure to follow the documentation and set the secret value in KeyVault +- **Commvault QSDK Token**: Make sure to follow the documentation and set the secret value in KeyVault + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a Commvault Instance to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Commvalut QSDK Token** + +[Follow these instructions](https://documentation.commvault.com/2024e/essential/creating_access_token.html) to create an API Token. + +**STEP 2 - Deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the CommvaultSecurityIQ data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Commvault Endpoint URL and QSDK Token, readily available. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Commvault Security IQ data connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CommvaultSecurityIQ-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Region**. +3. Enter the **Workspace ID**, **Workspace Key** 'and/or Other required fields' and click Next. +4. Click **Create** to deploy. + | | | |--------------------------|---| | **Tables Ingested** | `CommvaultSecurityIQ_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/contrast-protect.md b/Tools/Solutions Analyzer/connector-docs/solutions/contrast-protect.md index df4a82fe564..1fb192029a2 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/contrast-protect.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/contrast-protect.md @@ -25,6 +25,63 @@ This solution provides **2 data connector(s)**. Contrast Protect mitigates security threats in production applications with runtime protection and observability. Attack event results (blocked, probed, suspicious...) and other information can be sent to Microsoft Microsoft Sentinel to blend with security information from other systems. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Configure the Contrast Protect agent to forward events to syslog as described here: https://docs.contrastsecurity.com/en/output-to-syslog.html. Generate some attack events for your application. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. 
+ +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/contrastadr.md b/Tools/Solutions Analyzer/connector-docs/solutions/contrastadr.md index 6c80cd56ca7..7efdc08a204 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/contrastadr.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/contrastadr.md @@ -22,6 +22,34 @@ This solution provides **1 data connector(s)**. The ContrastADR data connector provides the capability to ingest Contrast ADR attack events into Microsoft Sentinel using the ContrastADR Webhook. ContrastADR data connector can enrich the incoming webhook data with ContrastADR API enrichment calls. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Use this Workspace ID and Primary Key as the shared key in the Azure Function App +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. Azure Resource Manager (ARM) Template** + +Use this method to automate deployment of the ContrastADR Data Connector using ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ContrastADR-azuredeploy) +2. Provide the following parameters: Region, Function Name, LOG_ANALYTICS_SHARED_KEY, LOG_ANALYTICS_WORKSPACE_ID + | | | |--------------------------|---| | **Tables Ingested** | `ContrastADRIncident_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/corelight.md b/Tools/Solutions Analyzer/connector-docs/solutions/corelight.md index 532e57f1084..2565413f58c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/corelight.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/corelight.md @@ -21,6 +21,39 @@ This solution provides **1 data connector(s)**. The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution. + +**1. Get the files** + +Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration. + +**2. Replay sample data.** + +Replay sample data to create the needed tables in your Log Analytics workspace. +- **Send sample data (only needed once per Log Analytics workspace)**: `./send_samples.py --workspace-id {0} --workspace-key {1}` + +**3. Install custom exporter.** + +Install the custom exporter or the logstash container. + +**4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent.** + +Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Workspace Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `Corelight_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cortex-xdr.md b/Tools/Solutions Analyzer/connector-docs/solutions/cortex-xdr.md index 0915a2fd424..46bead85550 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cortex-xdr.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cortex-xdr.md @@ -21,6 +21,34 @@ This solution provides **1 data connector(s)**. The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### Configuration steps for the Palo Alto Cortex XDR API + Follow the instructions to obtain the credentials. 
you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key. +#### 1. Retrieve API URL + 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials + 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] + 1.3. Under [**Integrations**] click on [**API Keys**]. + 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner. +#### 2. Retrieve API Token + 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials + 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] + 2.3. Under [**Integrations**] click on [**API Keys**]. + 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner. + 2.5. Choose security level, role, choose Standard and click on [**Generate**] + 2.6. Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column +- **Base API URL**: https://api-example.xdr.au.paloaltonetworks.com +- **API Key ID**: API ID +- **API Token**: (password field) +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `PaloAltoCortexXDR_Alerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cribl.md b/Tools/Solutions Analyzer/connector-docs/solutions/cribl.md index 7a6c601691f..38817b3c2ef 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cribl.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cribl.md @@ -22,6 +22,22 @@ This solution provides **1 data connector(s)**. The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. This gives you more security insight into your organization's data pipelines. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Installation and setup instructions for Cribl Stream for Microsoft Sentinel** + +Use the documentation from this Github repository and configure Cribl Stream using + +https://docs.cribl.io/stream/usecase-azure-workspace/ + | | | |--------------------------|---| | **Tables Ingested** | `CriblAccess_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/crowdstrike-falcon-endpoint-protection.md b/Tools/Solutions Analyzer/connector-docs/solutions/crowdstrike-falcon-endpoint-protection.md index 265dbe61b59..d37c9b1b0ff 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/crowdstrike-falcon-endpoint-protection.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/crowdstrike-falcon-endpoint-protection.md @@ -41,6 +41,91 @@ This solution provides **6 data connector(s)**. This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **SQS and AWS S3 account credentials/permissions**: **AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**1. Prerequisites** + +1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR. + - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. + - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. +2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. + - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. + - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page. + +**2. Deployment Options** + +Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) +2. 
Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources. +3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +4. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy DCE, DCR and Custom Tables for data ingestion** + +1. Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) +2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment). + - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3). + - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2). + +**2. Deploy a Function App** + +1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer. +2. 
Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + +**3. Configure the Function App** + +1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select ** New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + AWS_KEY + AWS_SECRET + AWS_REGION_NAME + QUEUE_URL + USER_SELECTION_REQUIRE_RAW //True if raw data is required + USER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required + MAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium + MAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here + AZURE_TENANT_ID + AZURE_CLIENT_ID + AZURE_CLIENT_SECRET + DCE_INGESTION_ENDPOINT + NORMALIZED_DCR_ID + RAW_DATA_DCR_ID + EVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet + REQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet + Schedule //Add value as '0 */1 * * * *' to ensure the function runs every minute. +5. Once all application settings have been entered, click **Save**. 
+ | | | |--------------------------|---| | **Tables Ingested** | `ASimAuditEventLogs` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ctera.md b/Tools/Solutions Analyzer/connector-docs/solutions/ctera.md index 74cb6a8a813..c5a6f00a7fc 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ctera.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ctera.md @@ -27,6 +27,23 @@ The CTERA Data Connector for Microsoft Sentinel offers monitoring and threat det Additionally, it helps you identify critical patterns such as mass access denied events, mass deletions, and mass permission changes, enabling proactive threat management and response. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Step 1: Connect CTERA Platform to Syslog** + +Set up your CTERA portal syslog connection and Edge-Filer Syslog connector + +**2. Step 2: Install Azure Monitor Agent (AMA) on Syslog Server** + +Install the Azure Monitor Agent (AMA) on your syslog server to enable data collection. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ctm360.md b/Tools/Solutions Analyzer/connector-docs/solutions/ctm360.md index 1fe7bb75357..28ba3cc3808 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ctm360.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ctm360.md @@ -25,6 +25,96 @@ This solution provides **2 data connector(s)**. Through the API integration, you have the capability to retrieve all the issues related to your HackerView organizations via a RESTful interface. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a '' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the 'HackerView' API** + +The provider should provide or link to detailed steps to configure the 'HackerView' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel. 
+ +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the 'HackerView' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'HackerView' API authorization key(s) readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the 'HackerView' connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CTM360-HV-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CTM360-HV-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **API **, 'and/or Other required fields'. +>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the 'HackerView' connector manually with Azure Functions. + +**5. 
Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the CTM360 CBS data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development. + +1. Download the [Azure Function App](https://raw.githubusercontent.com/CTM360-Integrations/Azure-Sentinel/ctm360-HV-CBS-azurefunctionapp/Solutions/CTM360/Data%20Connectors/HackerView/AzureFunctionCTM360_HV.zip) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CTIXYZ). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + CTM360AccountID + WorkspaceID + WorkspaceKey + CTM360Key + FUNCTION_NAME + logAnalyticsUri - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `HackerViewLog_Azure_1_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cyberark-enterprise-password-vault-(epv)-events.md b/Tools/Solutions Analyzer/connector-docs/solutions/cyberark-enterprise-password-vault-(epv)-events.md index 9b5e9179c88..c5c77db5c39 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cyberark-enterprise-password-vault-(epv)-events.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cyberark-enterprise-password-vault-(epv)-events.md @@ -25,6 +25,60 @@ This solution provides **2 data connector(s)**. CyberArk Privilege Access Manager generates an xml Syslog message for every action taken against the Vault. The PAM will send the xml messages through the Microsoft Sentinel.xsl translator to be converted into CEF standard format and sent to a syslog staging server of your choice (syslog-ng, rsyslog). The Log Analytics agent installed on your syslog staging server will import the messages into Microsoft Log Analytics. 
Refer to the [CyberArk documentation](https://docs.cyberark.com/privilege-cloud-standard/Latest/en/Content/Privilege%20Cloud/privCloud-connect-siem.htm) for more guidance on SIEM integrations. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + On the EPV configure the dbparm.ini to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machines IP address. + + **Step C. 
Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machines security according to your organizations security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cyberarkaudit.md b/Tools/Solutions Analyzer/connector-docs/solutions/cyberarkaudit.md index 3c3399c08c7..f8ef8efba5c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cyberarkaudit.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cyberarkaudit.md @@ -21,6 +21,94 @@ This solution provides **1 data connector(s)**. The [CyberArk Audit](https://docs.cyberark.com/Audit/Latest/en/Content/Resources/_TopNav/cc_Home.htm) data connector provides the capability to retrieve security event logs of the CyberArk Audit service and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Audit REST API Connections details and Credentials**: **OauthUsername**, **OauthPassword**, **WebAppID**, **AuditApiKey**, **IdentityEndpoint** and **AuditApiBaseUrl** are required for making API calls. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**NOTE:** API authorization key(s) or token(s) are securely stored in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
+ +**STEP 1 - Configuration steps for the CyberArk Audit SIEM Integration** + + Follow the [instructions](https://docs.cyberark.com/audit/latest/en/Content/Audit/isp_Microsoft_Sentinel.htm?tocpath=SIEM%20integrations%7C_____3) to obtain connection details and credentials. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the CyberArk Audit data connector, have the Workspace Name and Workspace Location (can be copied from the following). +- **Workspace Name**: `WorkspaceName` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Workspace Location**: `WorkspaceLocation` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the CyberArk Audit data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CyberArkAuditAPI-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **CyberArkAuditUsername**, **CyberArkAuditPassword**, **CyberArkAuditServerURL** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the CyberArk Audit data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. 
Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-CyberArkAudit-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CyberArkXXXXX). + + e. **Select a runtime:** Choose Python 3.10. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3.
Add each of the following application settings individually, with their respective string values (case-sensitive): + CyberArkAuditUsername + CyberArkAuditPassword + CyberArkAuditServerURL + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `CyberArkAudit` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cybersixgill-actionable-alerts.md b/Tools/Solutions Analyzer/connector-docs/solutions/cybersixgill-actionable-alerts.md index 015fef200e6..a28bb83c09f 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cybersixgill-actionable-alerts.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cybersixgill-actionable-alerts.md @@ -22,6 +22,86 @@ This solution provides **1 data connector(s)**. Actionable alerts provide customized alerts based on configured assets +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Client_ID** and **Client_Secret** are required for making API calls. 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Cybersixgill API to pull Alerts into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**1. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Cybersixgill Actionable Alerts data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/senitnel-cybersixgill-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **Client ID**, **Client Secret**, **TimeInterval** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy.
+ +**2. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Cybersixgill Actionable Alerts data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> NOTE: You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cybersixgill-Actionable-Alerts/Data%20Connectors/CybersixgillAlerts.zip?raw=true) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CybersixgillAlertsXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin.
A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + ClientID + ClientSecret + Polling + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us` +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `CyberSixgill_Alerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cyborg-security-hunter.md b/Tools/Solutions Analyzer/connector-docs/solutions/cyborg-security-hunter.md index 3f25dcf6984..68df3ffd0a7 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cyborg-security-hunter.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cyborg-security-hunter.md @@ -26,6 +26,39 @@ Cyborg Security is a leading provider of advanced threat hunting solutions, with Follow the steps to gain access to Cyborg Security's Community and setup the 'Open in Tool' capabilities in the HUNTER Platform. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+ +ℹ️ Use the following link to find your Azure Tenant ID How to find your Azure Active Directory tenant ID +- **ResourceGroupName & WorkspaceName**: `{0}` +- **WorkspaceID**: `{0}` + +**1. Sign up for Cyborg Security's HUNTER Community Account** + +Cyborg Security offers Community Members access to a subset of the Emerging Threat Collections and hunt packages. + +Create a Free Community Account to get access to Cyborg Security's Hunt Packages: [Sign Up Now!](https://www.cyborgsecurity.com/user-account-creation/) + +**2. Configure the Open in Tool Feature** + +1. Navigate to the [Environment](https://hunter.cyborgsecurity.io/environment) section of the HUNTER Platform. +2. Fill in the **Root URI** of your environment in the section labeled **Microsoft Sentinel**. Replace the placeholders with the IDs and Names of your Subscription, Resource Groups and Workspaces. + + https[]()://portal.azure.com#@**AzureTenantID**/blade/Microsoft_OperationsManagementSuite_Workspace/Logs.ReactView/resourceId/%2Fsubscriptions%2F**AzureSubscriptionID**%2Fresourcegroups%2F**ResourceGroupName**%2Fproviders%2Fmicrosoft.operationalinsights%2Fworkspaces%2F<**WorkspaceName**>/ +3. Click **Save**. + +**3. Execute a HUNTER hunt package in Microsoft Sentinel** + +Identify a Cyborg Security HUNTER hunt package to deploy and use the **Open In Tool** button to quickly open Microsoft Sentinel and stage the hunting content. + +![image](https://7924572.fs1.hubspotusercontent-na1.net/hubfs/7924572/HUNTER/Screenshots/openintool-ms-new.png) + | | | |--------------------------|---| | **Tables Ingested** | `SecurityEvent` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cyeradspm.md b/Tools/Solutions Analyzer/connector-docs/solutions/cyeradspm.md index 28aa85d1681..00c46be80dd 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cyeradspm.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cyeradspm.md @@ -16,15 +16,71 @@ This solution provides **2 data connector(s)**.
-### [Cyera DSPM Azure Sentinel Data Connector](../connectors/cyeradspmccf.md) +### [Cyera DSPM Microsoft Sentinel Data Connector](../connectors/cyeradspmccf.md) **Publisher:** Cyera Inc -### [Cyera DSPM Azure Functions Sentinel Data Connector](../connectors/cyerafunctionsconnector.md) +### [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](../connectors/cyerafunctionsconnector.md) **Publisher:** Cyera Inc -The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Sentinel. 
+The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel. + +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Note** + +>**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). Function runtime and data egress may incur charges. See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/). + +**2. Optional Step** + +>**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. 
See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references). + +**3. STEP 1 — Prepare Cyera API Access** + +1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\n2) Note **API Base URL**, **Client ID**, and **Client Secret**. + +**4. STEP 2 — Choose ONE deployment option** + +> Before deploying, have these values handy: +- **Cyera Function Connector Name**: `CyeraDSPMConnector` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Workspace Name**: `{{workspace-location}}` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Workspace Location**: `{{workspace-location}}` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Cyera Base URL**: `https://api.cyera.io` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Cyera Personal Access Token Client ID**: `CyeraClientID` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Cyera Personal Access Token Secret**: `CyeraSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**5. Option 1** + +**Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri}}) +2. Select the preferred **FunctionName** and **Workspace Name**. +3.
Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. +>Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**6. Option 2 — Manual Deployment** + +Follow the [install pack’s step-by-step guide]({{userguide-url}}).\n\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dcr-`, and data collection endpoint with format `sentinel-dce-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\n2) Deploy the Azure Function from the repo's Function folder (Timer-trigger; schedule typically 5–15 minutes).\n3) Configure Function App settings:\n - `CyeraBaseUrl` — Cyera API Base URL\n - `CyeraClientId` — Client ID (PAT)\n - `CyeraSecret` — Client Secret (PAT)\n - `DCR_IMMUTABLE_ID` — DCR immutable ID\n - `DCE_ENDPOINT` — Logs ingestion endpoint URL\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\n4) Save and Start the Function App.
| | | |--------------------------|---| @@ -43,10 +99,10 @@ This solution ingests data into **5 table(s)**: | Table | Used By Connectors | |-------|-------------------| -| `CyeraAssets_CL` | [Cyera DSPM Azure Functions Sentinel Data Connector](../connectors/cyerafunctionsconnector.md), [Cyera DSPM Azure Sentinel Data Connector](../connectors/cyeradspmccf.md) | -| `CyeraAssets_MS_CL` | [Cyera DSPM Azure Functions Sentinel Data Connector](../connectors/cyerafunctionsconnector.md), [Cyera DSPM Azure Sentinel Data Connector](../connectors/cyeradspmccf.md) | -| `CyeraClassifications_CL` | [Cyera DSPM Azure Functions Sentinel Data Connector](../connectors/cyerafunctionsconnector.md), [Cyera DSPM Azure Sentinel Data Connector](../connectors/cyeradspmccf.md) | -| `CyeraIdentities_CL` | [Cyera DSPM Azure Functions Sentinel Data Connector](../connectors/cyerafunctionsconnector.md), [Cyera DSPM Azure Sentinel Data Connector](../connectors/cyeradspmccf.md) | -| `CyeraIssues_CL` | [Cyera DSPM Azure Functions Sentinel Data Connector](../connectors/cyerafunctionsconnector.md), [Cyera DSPM Azure Sentinel Data Connector](../connectors/cyeradspmccf.md) | +| `CyeraAssets_CL` | [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](../connectors/cyerafunctionsconnector.md), [Cyera DSPM Microsoft Sentinel Data Connector](../connectors/cyeradspmccf.md) | +| `CyeraAssets_MS_CL` | [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](../connectors/cyerafunctionsconnector.md), [Cyera DSPM Microsoft Sentinel Data Connector](../connectors/cyeradspmccf.md) | +| `CyeraClassifications_CL` | [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](../connectors/cyerafunctionsconnector.md), [Cyera DSPM Microsoft Sentinel Data Connector](../connectors/cyeradspmccf.md) | +| `CyeraIdentities_CL` | [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](../connectors/cyerafunctionsconnector.md), [Cyera DSPM Microsoft Sentinel Data Connector](../connectors/cyeradspmccf.md) 
| +| `CyeraIssues_CL` | [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](../connectors/cyerafunctionsconnector.md), [Cyera DSPM Microsoft Sentinel Data Connector](../connectors/cyeradspmccf.md) | [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-attack-surface.md b/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-attack-surface.md index 004803cec02..1608a1d2ee4 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-attack-surface.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-attack-surface.md @@ -19,6 +19,23 @@ This solution provides **1 data connector(s)**. **Publisher:** Microsoft +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. CYFIRMA Attack Surface** + +Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. 
+- **CYFIRMA API URL**: https://decyfir.cyfirma.com +- **CYFIRMA API Key**: (password field) +- **API Delta**: API Delta +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `CyfirmaASCertificatesAlerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-brand-intelligence.md b/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-brand-intelligence.md index ac3d620e5ba..dd739a67cac 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-brand-intelligence.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-brand-intelligence.md @@ -19,6 +19,23 @@ This solution provides **1 data connector(s)**. **Publisher:** Microsoft +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. CYFIRMA Brand Intelligence** + +Connect to CYFIRMA Brand Intelligence to ingest alerts data into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This enhances performance and efficiency by eliminating the need for query-time parsing. 
+- **CYFIRMA API URL**: https://decyfir.cyfirma.com +- **CYFIRMA API Key**: (password field) +- **API Delta**: API Delta +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `CyfirmaBIDomainITAssetAlerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-compromised-accounts.md b/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-compromised-accounts.md index ea2f1e2afb8..c80ab84a5ce 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-compromised-accounts.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-compromised-accounts.md @@ -21,6 +21,23 @@ This solution provides **1 data connector(s)**. The CYFIRMA Compromised Accounts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR/DeTCT API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. CYFIRMA Compromised Accounts** + +The CYFIRMA Compromised Accounts Data Connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR/DeTCT API to retrieve logs. 
Additionally, it supports DCR-based ingestion time transformations, which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. +- **CYFIRMA API URL**: https://decyfir.cyfirma.com +- **CYFIRMA API Key**: (password field) +- **API Delta**: API Delta +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `CyfirmaCompromisedAccounts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-cyber-intelligence.md b/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-cyber-intelligence.md index 5c1e6df7b18..042df62b263 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-cyber-intelligence.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-cyber-intelligence.md @@ -21,6 +21,26 @@ This solution provides **1 data connector(s)**. The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
CYFIRMA Cyber Intelligence** + +This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. +- **CYFIRMA API URL**: https://decyfir.cyfirma.com +- **CYFIRMA API Key**: (password field) +- **Pull all IoC's Or Tailored IoC's**: All IoC's or Tailored IoC's +- **API Delta**: API Delta +- **Recommended Actions**: Recommended Action can be any one of:All/Monitor/Block +- **Threat Actor Associated**: Is any Threat Actor Associated with the IoC's +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `CyfirmaCampaigns_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-digital-risk.md b/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-digital-risk.md index 0d189aea70b..39fda8fe1c6 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-digital-risk.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-digital-risk.md @@ -21,6 +21,23 @@ This solution provides **1 data connector(s)**. The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. CYFIRMA Digital Risk** + +Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing. +- **CYFIRMA API URL**: https://decyfir.cyfirma.com +- **CYFIRMA API Key**: (password field) +- **API Delta**: API Delta +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `CyfirmaDBWMDarkWebAlerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-vulnerabilities-intel.md b/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-vulnerabilities-intel.md index db9fecd9a7e..52ce1c92d07 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-vulnerabilities-intel.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cyfirma-vulnerabilities-intel.md @@ -21,6 +21,26 @@ This solution provides **1 data connector(s)**. The CYFIRMA Vulnerabilities Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the CYFIRMA API's to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. CYFIRMA Vulnerabilities Intelligence** + +This connector provides the Vulnerabilities logs from CYFIRMA Vulnerabilities Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency. +- **CYFIRMA API URL**: https://decyfir.cyfirma.com +- **CYFIRMA API Key**: (password field) +- **API Delta**: API Delta +- **Vendor-Associated Vulnerabilities** +- **Product-Associated Vulnerabilities** +- **Product with Version-Associated Vulnerabilities** +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `CyfirmaVulnerabilities_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/cynerio.md b/Tools/Solutions Analyzer/connector-docs/solutions/cynerio.md index 574beee7da0..2ec130bb4fc 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/cynerio.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/cynerio.md @@ -22,6 +22,32 @@ This solution provides **1 data connector(s)**. The [Cynerio](https://www.cynerio.com/) connector allows you to easily connect your Cynerio Security Events with Microsoft Sentinel, to view IDS Events. This gives you more insight into your organization network security posture and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure and connect Cynerio** + +Cynerio can integrate with and export events directly to Microsoft Sentinel via Azure Server. Follow these steps to establish integration: + +1. In the Cynerio console, go to Settings > Integrations tab (default), and click on the **+Add Integration** button at the top right. + +2. Scroll down to the **SIEM** section. + +3. On the Microsoft Sentinel card, click the Connect button. + +4. The Integration Details window opens. Use the parameters below to fill out the form and set up the connection. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `CynerioEvent_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/darktrace.md b/Tools/Solutions Analyzer/connector-docs/solutions/darktrace.md index 3872767bfb2..0186d625abb 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/darktrace.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/darktrace.md @@ -21,6 +21,39 @@ This solution provides **1 data connector(s)**. The Darktrace REST API connector pushes real-time events from Darktrace to Microsoft Sentinel and is designed to be used with the Darktrace Solution for Sentinel. 
The connector writes logs to a custom log table titled "darktrace_model_alerts_CL"; Model Breaches, AI Analyst Incidents, System Alerts and Email Alerts can be ingested - additional filters can be set up on the Darktrace System Configuration page. Data is pushed to Sentinel from Darktrace masters. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Darktrace Prerequisites**: To use this Data Connector a Darktrace master running v5.2+ is required. + Data is sent to the [Azure Monitor HTTP Data Collector API](https://docs.microsoft.com/azure/azure-monitor/logs/data-collector-api) over HTTPs from Darktrace masters, therefore outbound connectivity from the Darktrace master to Microsoft Sentinel REST API is required. +- **Filter Darktrace Data**: During configuration it is possible to set up additional filtering on the Darktrace System Configuration page to constrain the amount or types of data sent. +- **Try the Darktrace Sentinel Solution**: You can get the most out of this connector by installing the Darktrace Solution for Microsoft Sentinel. This will provide workbooks to visualise alert data and analytics rules to automatically create alerts and incidents from Darktrace Model Breaches and AI Analyst incidents. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +1. 
Detailed setup instructions can be found on the Darktrace Customer Portal: https://customerportal.darktrace.com/product-guides/main/microsoft-sentinel-introduction + 2. Take note of the Workspace ID and the Primary key. You will need to enter these details on your Darktrace System Configuration page. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. Darktrace Configuration** + +1. Perform the following steps on the Darktrace System Configuration page: + 2. Navigate to the System Configuration Page (Main Menu > Admin > System Config) + 3. Go into Modules configuration and click on the "Microsoft Sentinel" configuration card + 4. Select "HTTPS (JSON)" and hit "New" + 5. Fill in the required details and select appropriate filters + 6. Click "Verify Alert Settings" to attempt authentication and send out a test alert + 7. Run a "Look for Test Alerts" sample query to validate that the test alert has been received + | | | |--------------------------|---| | **Tables Ingested** | `darktrace_model_alerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/datalake2sentinel.md b/Tools/Solutions Analyzer/connector-docs/solutions/datalake2sentinel.md index 1b38e3d12df..93186c107f1 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/datalake2sentinel.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/datalake2sentinel.md @@ -22,6 +22,21 @@ This solution provides **1 data connector(s)**. This solution installs the Datalake2Sentinel connector which is built using the Codeless Connector Platform and allows you to automatically ingest threat intelligence indicators from **Datalake Orange Cyberdefense's CTI platform** into Microsoft Sentinel via the Upload Indicators REST API. 
After installing the solution, configure and enable this data connector by following guidance in Manage solution view. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Installation and setup instructions** + +Use the documentation from this Github repository to install and configure the Datalake to Microsoft Sentinel connector. + +https://github.com/cert-orangecyberdefense/datalake2sentinel + | | | |--------------------------|---| | **Tables Ingested** | `ThreatIntelligenceIndicator` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/dataminr-pulse.md b/Tools/Solutions Analyzer/connector-docs/solutions/dataminr-pulse.md index c3f9e97ccc1..92a70f8f129 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/dataminr-pulse.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/dataminr-pulse.md @@ -22,6 +22,240 @@ This solution provides **1 data connector(s)**. Dataminr Pulse Alerts Data Connector brings our AI-powered real-time intelligence into Microsoft Sentinel for faster threat detection and response. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group. 
+- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Required Dataminr Credentials/permissions**: + +a. Users must have a valid Dataminr Pulse API **client ID** and **secret** to use this data connector. + + b. One or more Dataminr Pulse Watchlists must be configured in the Dataminr Pulse website. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the DataminrPulse in which logs are pushed via Dataminr RTAP and it will ingest logs into Microsoft Sentinel. Furthermore, the connector will fetch the ingested data from the custom logs table and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1- Credentials for the Dataminr Pulse Client ID and Client Secret** + + * Obtain Dataminr Pulse user ID/password and API client ID/secret from your Dataminr Customer Success Manager (CSM). + +**STEP 2- Configure Watchlists in Dataminr Pulse portal.** + + Follow the steps in this section to configure watchlists in portal: + + 1. 
**Login** to the Dataminr Pulse [website](https://app.dataminr.com). + + 2. Click on the settings gear icon, and select **Manage Lists**. + + 3. Select the type of Watchlist you want to create (Cyber, Topic, Company, etc.) and click the **New List** button. + + 4. Provide a **name** for your new Watchlist, and select a highlight color for it, or keep the default color. + + 5. When you are done configuring the Watchlist, click **Save** to save it. + +**STEP 3 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of DataminrPulse Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 4 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of DataminrPulse Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. 
Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of DataminrPulse Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID** + + Follow the steps in this section to assign the role: + 1. In the Azure portal, Go to **Resource Group** and select your resource group. + 2. Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. Select **Contributor** as role and click on next. + 5. In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Dataminr Pulse Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**7. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the DataminrPulse connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-DataminrPulseAlerts-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-DataminrPulseAlerts-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + + a. **Function Name** + + b. **Location**: The location in which the data collection rules and data collection endpoints should be deployed. + + c. **Workspace**: Enter Workspace ID of log analytics Workspace ID + + d. **Workspace Key**: Enter Primary Key of log analytics Workspace + + e. **DataminrBaseURL**: Enter Base URL starting with "https://" followed by hostname (Example: https://gateway.dataminr.com/) + + f. **ClientId**: Enter your Dataminr account Client ID + + g. **ClientSecret**: Enter your Dataminr account Client Secret + + h. **AzureEntraObjectID**: Enter Object id of your Microsoft Entra App + + i. **AlertsTableName**: Enter name of the table used to store Dataminr Alerts logs. Default is 'DataminrPulse_Alerts' + + j. **AzureClientId**: Enter Azure Client ID that you have created during app registration + + k. **AzureClientSecret**: Enter Azure Client Secret that you have created during creating the client secret + + l. **AzureTenantId**: Enter Azure Tenant ID of your Azure Active Directory + + m. **AzureResourceGroupName**: Enter Azure Resource Group Name in which you want deploy the data connector + + n. 
**AzureWorkspaceName**: Enter Microsoft Sentinel Workspace Name of Log Analytics workspace + + o. **AzureSubscriptionId**: Enter Azure Subscription Id which is present in the subscription tab in Microsoft Sentinel + + p. **LogLevel**: Add log level or log severity value. Default is 'INFO' + + q. **Schedule**: Enter a valid Quartz Cron-Expression (Example: 0 0 0 * * *) +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**8. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Dataminr Pulse Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**9. 1) Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-DataminrPulseAlerts-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. 
The name you type is validated to make sure that it's unique in Azure Functions. (e.g. DmPulseXXXXX). + + e. **Select a runtime:** Choose Python 3.8 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**10. 2) Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective values (case-sensitive): + + a. **Function Name** + + b. **Location**: The location in which the data collection rules and data collection endpoints should be deployed. + + c. **Workspace**: Enter Workspace ID of log analytics Workspace ID + + d. **Workspace Key**: Enter Primary Key of log analytics Workspace + + e. **DataminrBaseURL**: Enter Base URL starting with "https://" followed by hostname (Example: https://gateway.dataminr.com/) + + f. **ClientId**: Enter your Dataminr account Client ID + + g. **ClientSecret**: Enter your Dataminr account Client Secret + + h. **AzureEntraObjectID**: Enter Object id of your Microsoft Entra App + + i. **AlertsTableName**: Enter name of the table used to store Dataminr Alerts logs. Default is 'DataminrPulse_Alerts' + + j. **AzureClientId**: Enter Azure Client ID that you have created during app registration + + k. **AzureClientSecret**: Enter Azure Client Secret that you have created during creating the client secret + + l. **AzureTenantId**: Enter Azure Tenant ID of your Azure Active Directory + + m. **AzureResourceGroupName**: Enter Azure Resource Group Name in which you want deploy the data connector + + n. 
+ **AzureWorkspaceName**: Enter Microsoft Sentinel Workspace Name of Log Analytics workspace + + o. **AzureSubscriptionId**: Enter Azure Subscription Id which is present in the subscription tab in Microsoft Sentinel + + p. **LogLevel**: Add log level or log severity value. Default is 'INFO' + + q. **Schedule**: Enter a valid Quartz Cron-Expression (Example: 0 0 0 * * *) + + r. **logAnalyticsUri** (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + +**STEP 7 - Post Deployment steps** + +**12. 1) Get the Function app endpoint** + +1. Go to Azure function Overview page and Click on **"Functions"** in the left blade. +2. Click on the function called **"DataminrPulseAlertsHttpStarter"**. +3. Go to **"GetFunctionurl"** and copy the function url. +4. Replace **{functionname}** with **"DataminrPulseAlertsSentinelOrchestrator"** in copied function url. + +**13. 2) To add integration settings in Dataminr RTAP using the function URL** + +1. Open any API request tool like Postman. +2. Click on '+' to create a new request. +3. Select HTTP request method as **'POST'**. +4. Enter the url prepared in **point 1)**, in the request URL part. +5. In Body, select raw JSON and provide request body as below (case-sensitive): + { + "integration-settings": "ADD", + "url": "`(URL part from copied Function-url)`", + "token": "`(value of code parameter from copied Function-url)`" + } +6. After providing all required details, click **Send**. +7. You will receive an integration setting ID in the HTTP response with a status code of 200. +8. Save **Integration ID** for future reference. + +*Now we are done with adding the integration settings for Dataminr RTAP. 
Once the Dataminr RTAP sends alert data, the Function app is triggered and you should be able to see the alert data from Dataminr Pulse in the Log Analytics workspace table called "DataminrPulse_Alerts_CL".* + | | | |--------------------------|---| | **Tables Ingested** | `DataminrPulse_Alerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/delinea-secret-server.md b/Tools/Solutions Analyzer/connector-docs/solutions/delinea-secret-server.md index d327214fa51..1123272712d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/delinea-secret-server.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/delinea-secret-server.md @@ -25,6 +25,64 @@ This solution provides **2 data connector(s)**. Common Event Format (CEF) from Delinea Secret Server +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Delinea Secret Server**: must be configured to export logs via Syslog + + [Learn more about configuring Secret Server](https://thy.center/ss/link/syslog) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. 
+ +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel. This machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python --version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python --version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/digital-guardian-data-loss-prevention.md b/Tools/Solutions Analyzer/connector-docs/solutions/digital-guardian-data-loss-prevention.md index c5cbf62bc68..a09b6121398 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/digital-guardian-data-loss-prevention.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/digital-guardian-data-loss-prevention.md @@ -21,6 +21,89 @@ This solution provides **1 data connector(s)**. [Digital Guardian Data Loss Prevention (DLP)](https://digitalguardian.com/platform-overview) data connector provides the capability to ingest Digital Guardian DLP logs into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**DigitalGuardianDLPEvent**](https://aka.ms/sentinel-DigitalGuardianDLP-parser) which is deployed with the Microsoft Sentinel Solution. + +**1. 
Configure Digital Guardian to forward logs via Syslog to remote server where you will install the agent.** + +Follow these steps to configure Digital Guardian to forward logs via Syslog: + +1.1. Log in to the Digital Guardian Management Console. + +1.2. Select **Workspace** > **Data Export** > **Create Export**. + +1.3. From the **Data Sources** list, select **Alerts** or **Events** as the data source. + +1.4. From the **Export type** list, select **Syslog**. + +1.5. From the **Type list**, select **UDP** or **TCP** as the transport protocol. + +1.6. In the **Server** field, type the IP address of your Remote Syslog server. + +1.7. In the **Port** field, type 514 (or other port if your Syslog server was configured to use non-default port). + +1.8. From the **Severity Level** list, select a severity level. + +1.9. Select the **Is Active** check box. + +1.10. Click **Next**. + +1.11. From the list of available fields, add Alert or Event fields for your data export. + +1.12. Select a Criteria for the fields in your data export and click **Next**. + +1.13. Select a group for the criteria and click **Next**. + +1.14. Click **Test Query**. + +1.15. Click **Next**. + +1.16. Save the data export. + +**2. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server to which the logs will be forwarded. + +> Logs on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**.
+ - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**3. Check logs in Microsoft Sentinel** + +Open Log Analytics to check if the logs are received using the Syslog schema. + +>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/digital-shadows.md b/Tools/Solutions Analyzer/connector-docs/solutions/digital-shadows.md index 65687d111b7..0c70003b161 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/digital-shadows.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/digital-shadows.md @@ -20,6 +20,92 @@ This solution provides **1 data connector(s)**. The Digital Shadows data connector provides ingestion of the incidents and alerts from Digital Shadows Searchlight into the Microsoft Sentinel using the REST API. The connector will provide the incidents and alerts information such that it helps to examine, diagnose and analyse the potential security risks and threats. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **REST API Credentials/permissions**: **Digital Shadows account ID, secret and key** is required. See the documentation to learn more about API on the `https://portal-digitalshadows.com/learn/searchlight-api/overview/description`. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a 'Digital Shadows Searchlight' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the 'Digital Shadows Searchlight' API** + +The provider should provide or link to detailed steps to configure the 'Digital Shadows Searchlight' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the 'Digital Shadows Searchlight' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'Digital Shadows Searchlight' API authorization key(s) or Token, readily available. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the 'Digital Shadows Searchlight' connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Digitalshadows-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, 'and/or Other required fields'. +>Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the 'Digital Shadows Searchlight' connector manually with Azure Functions. + +**1. Create a Function App** + +1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp). +2. Click **+ Create** at the top. +3. In the **Basics** tab, ensure Runtime stack is set to **python 3.11**. +4. In the **Hosting** tab, ensure **Plan type** is set to **'Consumption (Serverless)'**. +5. Select Storage account +6. 'Add other required configurations'. +7. 'Make other preferable configuration changes', if needed, then click **Create**. + +**2.
Import Function App Code(Zip deployment)** + +1. Install Azure CLI +2. From terminal type **az functionapp deployment source config-zip -g -n --src ** and hit enter. Set the `ResourceGroup` value to: your resource group name. Set the `FunctionApp` value to: your newly created function app name. Set the `Zip File` value to: `digitalshadowsConnector.zip`(path to your zip file). Note:- Download the zip file from the link - [Function App Code](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Shadows/Data%20Connectors/Digital%20Shadows/digitalshadowsConnector.zip) + +**3. Configure the Function App** + +1. In the Function App screen, click the Function App name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following 'x (number of)' application settings individually, under Name, with their respective string values (case-sensitive) under Value: + DigitalShadowsAccountID + WorkspaceID + WorkspaceKey + DigitalShadowsKey + DigitalShadowsSecret + HistoricalDays + DigitalShadowsURL + ClassificationFilterOperation + HighVariabilityClassifications + FUNCTION_NAME + logAnalyticsUri (optional) +(add any other settings required by the Function App) +Set the `DigitalShadowsURL` value to: `https://api.searchlight.app/v1` +Set the `HighVariabilityClassifications` value to: `exposed-credential,marked-document` +Set the `ClassificationFilterOperation` value to: `exclude` for exclude function app or `include` for include function app +>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `DigitalShadows_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/doppel.md b/Tools/Solutions Analyzer/connector-docs/solutions/doppel.md index 6214b842e9d..4403ff16fa0 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/doppel.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/doppel.md @@ -21,6 +21,114 @@ This solution provides **1 data connector(s)**. The data connector is built on Microsoft Sentinel for Doppel events and alerts and supports DCR-based [ingestion time transformations](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/ingestion-time-transformations) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. + +**Custom Permissions:** +- **Microsoft Entra Tenant ID, Client ID and Client Secret**: Microsoft Entra ID requires a Client ID and Client Secret to authenticate your application. Additionally, Global Admin/Owner level access is required to assign the Entra-registered application a Resource Group Monitoring Metrics Publisher role. +- **Requires Workspace ID, DCE-URI, DCR-ID**: You will need to get the Log Analytics Workspace ID, DCE Logs Ingestion URI and DCR Immutable ID for the configuration. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure Doppel Webhook** + +Configure the Webhook in Doppel and Endpoint with permissions in Microsoft Sentinel to send data. +**Register the Application in Microsoft Entra ID** + + 1. **Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**: + - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab. + - Ensure you are logged in with an account that has **Admin level** permissions. + +2. **Create a New Application**: + - In the **Microsoft Entra ID portal**, select **App registrations** mentioned on the left-hand side tab. + - Click on **+ New registration**. + - Fill out the following fields: + - **Name**: Enter a name for the app (e.g., “Doppel App”). + - **Supported account types**: Choose **Accounts in this organizational directory only** (Default Directory only - Single tenant). + - **Redirect URI**: Leave this blank unless required otherwise. + - Click **Register** to create the application. + +3. **Copy Application and Tenant IDs**: + - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You’ll need these for the integration. + +4. **Create a Client Secret**: + - In the **Certificates & secrets** section, click **+ New client secret**. + - Add a description (e.g., 'Doppel Secret') and set an expiration (e.g., 1 year). + - Click **Add**. + - **Copy the client secret value immediately**, as it will not be shown again. + + **Assign the "Monitoring Metrics Publisher" Role to the App** + + 1. **Open the Resource Group in Azure Portal**: + - Navigate to the **Resource Group** that contains the **Log Analytics Workspace** and **Data Collection Rules (DCRs)** where you want the app to push data. + +2. **Assign the Role**: + - In the **Resource Group** menu, click on **Access control (IAM)** mentioned on the left-hand side tab .. 
+ - Click on **+ Add** and select **Add role assignment**. + - In the **Role** dropdown, search for and select the **Monitoring Metrics Publisher** role. + - Under **Assign access to**, choose **Azure AD user, group, or service principal**. + - In the **Select** field, search for your registered app by **name** or **client ID**. + - Click **Save** to assign the role to the application. + + **Deploy the ARM Template** + + 1. **Retrieve the Workspace ID**: + - After assigning the role, you will need the **Workspace ID**. + - Navigate to the **Log Analytics Workspace** within the **Resource Group**. + - In the **Overview** section, locate the **Workspace ID** field under **Workspace details**. + - **Copy the Workspace ID** and keep it handy for the next steps. + +2. **Click the Deploy to Azure Button**: + - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fmetron-labs%2FAzure-Sentinel%2Frefs%2Fheads%2FDoppelSolution%2FSolutions%2FDoppel%2FData%2520Connectors%2FDeployToAzure.json). + - This will take you directly to the Azure portal to start the deployment. + +3. **Review and Customize Parameters**: + - On the custom deployment page, ensure you’re deploying to the correct **subscription** and **resource group**. + - Fill in the parameters like **workspace name**, **workspace ID**, and **workspace location**. + +4. **Click Review + Create** and then **Create** to deploy the resources. + + **Verify DCE, DCR, and Log Analytics Table Setup** + + 1. **Check the Data Collection Endpoint (DCE)**: + - After deploying, go to **Azure Portal > Data Collection Endpoints**. + - Verify that the **DoppelDCE** endpoint has been created successfully. + - **Copy the DCE Logs Ingestion URI**, as you’ll need this for generating the webhook URL. + +2. **Confirm Data Collection Rule (DCR) Setup**: + - Go to **Azure Portal > Data Collection Rules**. 
+ - Ensure the **DoppelDCR** rule is present. + - **Copy the Immutable ID** of the DCR from the Overview page, as you’ll need it for the webhook URL. + +3. **Validate Log Analytics Table**: + - Navigate to your **Log Analytics Workspace** (linked to Microsoft Sentinel). + - Under the **Tables** section, verify that the **DoppelTable_CL** table has been created successfully and is ready to receive data. + + **Integrate Doppel Alerts with Microsoft Sentinel** + + 1. **Gather Necessary Information**: + - Collect the following details required for integration: + - **Data Collection Endpoint ID (DCE-ID)** + - **Data Collection Rule ID (DCR-ID)** + - **Microsoft Entra Credentials**: Tenant ID, Client ID, and Client Secret. + +2. **Coordinate with Doppel Support**: + - Share the collected DCE-ID, DCR-ID, and Microsoft Entra credentials with Doppel support. + - Request assistance to configure these details in the Doppel tenant to enable webhook setup. + +3. **Webhook Setup by Doppel**: + - Doppel will use the provided Resource IDs and credentials to configure a webhook. + - This webhook will facilitate the forwarding of alerts from Doppel to Microsoft Sentinel. + +4. **Verify Alert Delivery in Microsoft Sentinel**: + - Check that alerts from Doppel are successfully forwarded to Microsoft Sentinel. + - Validate that the **Workbook** in Microsoft Sentinel is updated with the alert statistics, ensuring seamless data integration. + | | | |--------------------------|---| | **Tables Ingested** | `DoppelTable_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/dragos.md b/Tools/Solutions Analyzer/connector-docs/solutions/dragos.md index 2fe29e094ee..8b87209d709 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/dragos.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/dragos.md @@ -22,6 +22,26 @@ This solution provides **1 data connector(s)**. 
The [Dragos Platform](https://www.dragos.com/) is the leading Industrial Cyber Security platform it offers a comprehensive Operational Technology (OT) cyber threat detection built by unrivaled industrial cybersecurity expertise. This solution enables Dragos Platform notification data to be viewed in Microsoft Sentinel so that security analysts are able to triage potential cyber security events occurring in their industrial environments. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Dragos Sitestore API access**: A Sitestore user account that has the `notification:read` permission. This account also needs to have an API key that can be provided to Sentinel. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Please provide the following information to allow Microsoft Sentinel to connect to your Dragos Sitestore. +- **Dragos Sitestore Hostname**: dragossitestore.example.com +- **Dragos Sitestore API Key ID**: Enter the API key ID. +- **Dragos Sitestore API Key Secret**: (password field) +- **Minimum Notification Severity. Valid values are 0-5 inclusive. Ensure less than or equal to maximum severity.**: Enter the min severity (recommend 0 for all notifications) +- **Maximum Notification Severity. Valid values are 0-5 inclusive. 
Ensure greater than or equal to minimum severity.**: Enter the max severity (recommend 5 for all notifications) +- Click 'Connect to Sitestore' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `DragosAlerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/druvadatasecuritycloud.md b/Tools/Solutions Analyzer/connector-docs/solutions/druvadatasecuritycloud.md index 4034d656932..0556a65871c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/druvadatasecuritycloud.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/druvadatasecuritycloud.md @@ -21,6 +21,35 @@ This solution provides **1 data connector(s)**. Provides capability to ingest the Druva events from Druva APIs +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required + +**Custom Permissions:** + +- **Druva API Access**: Druva API requires a client id and client secret to authenticate + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>Note: Configurations to connect to Druva Rest API + +Step 1: Create credentials from Druva console. Refer to this doc for steps: https://help.druva.com/en/articles/8580838-create-and-manage-api-credentials + +Step 2: Enter the hostname. For public cloud it's apis.druva.com + +Step 3: Enter client id and client secret key + +**4.
Connect to Druva API to start collecting logs in Microsoft Sentinel** + +Provide required values: +- **Hostname**: Example: apis.druva.com +- **OAuth Configuration**: + - Client ID + - Client Secret + - Click 'Connect' to authenticate + | | | |--------------------------|---| | **Tables Ingested** | `DruvaInsyncEvents_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/dynamics-365.md b/Tools/Solutions Analyzer/connector-docs/solutions/dynamics-365.md index c4b730f19b6..99338c17fb6 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/dynamics-365.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/dynamics-365.md @@ -21,6 +21,22 @@ This solution provides **1 data connector(s)**. The Dynamics 365 Common Data Service (CDS) activities connector provides insight into admin, user, and support activities, as well as Microsoft Social Engagement logging events. By connecting Dynamics 365 CRM logs into Microsoft Sentinel, you can view this data in workbooks, use it to create custom alerts, and improve your investigation process. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com//fwlink/p/?linkid=2226719&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **Tenant Permissions**: 'Security Administrator' or 'Global Administrator' on the workspace's tenant. +- **License**: [Microsoft Dynamics 365 production license](https://docs.microsoft.com/office365/servicedescriptions/microsoft-dynamics-365-online-service-description) (This connector is available for production environments only, not for sandbox). Also, a Microsoft 365 Enterprise [E3 or E5](https://docs.microsoft.com/power-platform/admin/enable-use-comprehensive-auditing#requirements) subscription is required for Activity Logging. 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Connect [Dynamics 365 CRM](https://aka.ms/Sentinel/Dynamics365) activity logs to your Microsoft Sentinel workspace. +- Connect Dynamics365 + | | | |--------------------------|---| | **Tables Ingested** | `Dynamics365Activity` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/dynatrace.md b/Tools/Solutions Analyzer/connector-docs/solutions/dynatrace.md index 13dd51aa205..f6891e7c916 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/dynatrace.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/dynatrace.md @@ -34,6 +34,25 @@ This solution provides **4 data connector(s)**. This connector uses the [Dynatrace Security Problem REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/application-security/vulnerabilities/get-vulnerabilities) to ingest detected runtime vulnerabilities into Microsoft Sentinel Log Analytics. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Dynatrace tenant (ex. xyz.dynatrace.com)**: You need a valid Dynatrace tenant with [Application Security](https://www.dynatrace.com/platform/application-security/) enabled, learn more about the [Dynatrace platform](https://www.dynatrace.com/). +- **Dynatrace Access Token**: You need a Dynatrace Access Token, the token should have ***Read security problems*** (securityProblems.read) scope. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Dynatrace Vulnerabilities Events to Microsoft Sentinel** + +Configure and Enable Dynatrace [Application Security](https://www.dynatrace.com/platform/application-security/). + Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + | | | |--------------------------|---| | **Tables Ingested** | `DynatraceSecurityProblems_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/egress-defend.md b/Tools/Solutions Analyzer/connector-docs/solutions/egress-defend.md index 5b5a641f9e5..a5059d05307 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/egress-defend.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/egress-defend.md @@ -21,6 +21,23 @@ This solution provides **1 data connector(s)**. The Egress Defend audit connector provides the capability to ingest Egress Defend Data into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions on the Log Analytics workspace are required to enable the data connector. + +**Custom Permissions:** +- **Egress API Token**: An Egress API token is required to ingest audit records to Microsoft Sentinel. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Egress Defend with Microsoft Sentinel** + +Enter your Egress Defend API URl, Egress Domain and API token. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. 
Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + | | | |--------------------------|---| | **Tables Ingested** | `EgressDefend_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/egress-iris.md b/Tools/Solutions Analyzer/connector-docs/solutions/egress-iris.md index f456fff3771..2f8dc6adace 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/egress-iris.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/egress-iris.md @@ -21,6 +21,23 @@ This solution provides **1 data connector(s)**. The Egress Iris connector will allow you to ingest Egress data into Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions on the Log Analytics workspace are required to enable the data connector. + +**Custom Permissions:** +- **Egress API Token**: An Egress API token is required to ingest audit records to Microsoft Sentinel. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Egress Data with Microsoft Sentinel** + +Enter your Egress API Hostname and secret. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. 
+ | | | |--------------------------|---| | **Tables Ingested** | `DefendAuditData` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/elasticagent.md b/Tools/Solutions Analyzer/connector-docs/solutions/elasticagent.md index 7d3bd83c119..fe0c03767ef 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/elasticagent.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/elasticagent.md @@ -21,6 +21,77 @@ This solution provides **1 data connector(s)**. The [Elastic Agent](https://www.elastic.co/security) data connector provides the capability to ingest Elastic Agent logs, metrics, and security data into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Include custom pre-requisites if the connectivity requires - else delete customs**: Description for any custom pre-requisite + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ElasticAgentEvent**](https://aka.ms/sentinel-ElasticAgent-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using **Elastic Agent 7.14**. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server where the Elastic Agent logs are forwarded. 
+ +> Logs from Elastic Agents deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure Elastic Agent (Standalone)** + +[Follow the instructions](https://www.elastic.co/guide/en/fleet/current/elastic-agent-configuration.html) to configure Elastic Agent to output to Logstash + +**3. Configure Logstash to use Microsoft Logstash Output Plugin** + +Follow the steps to configure Logstash to use microsoft-logstash-output-azure-loganalytics plugin: + +3.1) Check if the plugin is already installed: +> ./logstash-plugin list | grep 'azure-loganalytics' +**(if the plugin is installed go to step 3.3)** + +3.2) Install plugin: +> ./logstash-plugin install microsoft-logstash-output-azure-loganalytics + +3.3) [Configure Logstash](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/microsoft-logstash-output-azure-loganalytics) to use the plugin + +**4. Validate log ingestion** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using custom table specified in step 3.3 (e.g. ElasticAgentLogs_CL). 
+ +>It may take about 30 minutes until the connection streams data to your workspace. + | | | |--------------------------|---| | **Tables Ingested** | `ElasticAgentLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ermes-browser-security.md b/Tools/Solutions Analyzer/connector-docs/solutions/ermes-browser-security.md index 5f30d132f3a..acc4afd8fa7 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ermes-browser-security.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ermes-browser-security.md @@ -21,6 +21,27 @@ This solution provides **1 data connector(s)**. Ermes Browser Security Events +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Ermes Client Id and Client Secret**: Enable API access in Ermes. Please contact [Ermes Cyber Security](https://www.ermes.company) support for more information. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Connect Ermes Browser Security Events to Microsoft Sentinel** + +Connect using OAuth2 credentials +- **OAuth Configuration**: + - Client ID + - Client Secret + - Click 'Connect' to authenticate + | | | |--------------------------|---| | **Tables Ingested** | `ErmesBrowserSecurityEvents_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/eset-inspect.md b/Tools/Solutions Analyzer/connector-docs/solutions/eset-inspect.md index 6b4ee5a31e7..b404aa89907 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/eset-inspect.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/eset-inspect.md @@ -21,6 +21,55 @@ This solution provides **1 data connector(s)**. This connector will ingest detections from [ESET Inspect](https://www.eset.com/int/business/solutions/xdr-extended-detection-and-response/) using the provided [REST API](https://help.eset.com/ei_navigate/latest/en-US/api.html). This API is present in ESET Inspect version 1.4 and later. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Access to the ESET PROTECT console**: Permissions to add users + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This connector uses Azure Functions to connect to ESET Inspect to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**1. Step 1 - Create an API user** + +1. Log into the ESET PROTECT console with an administrator account, select the **More** tab and the **Users** subtab. +2. Click on the **ADD NEW** button and add a **native user**. +3. Create a new user for the API account. **Optional:** Select a **Home group** other than **All** to limit what detections are ingested. +4. Under the **Permission Sets** tab, assign the **Inspect reviewer permission set**. +5. Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. + +**2. Step 2 - Copy Workspace ID and Key** + +>**IMPORTANT:** Before deploying the ESET Inspect connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Step 3 - Deploy the Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the ESET Inspect connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. 
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESETInspect-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, enter the **Inspect base URL** and the **first ID** to start ingesting detections from. + - The default starting ID is **0**. This means that all detections will be ingested. + - Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labelled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + | | | |--------------------------|---| | **Tables Ingested** | `ESETInspect_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/eset-protect-platform.md b/Tools/Solutions Analyzer/connector-docs/solutions/eset-protect-platform.md index bf6b82de93f..0f8feaf41e6 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/eset-protect-platform.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/eset-protect-platform.md @@ -22,6 +22,48 @@ This solution provides **1 data connector(s)**. The ESET Protect Platform data connector enables users to inject detections data from [ESET Protect Platform](https://www.eset.com/int/business/protect-platform/) using the provided [Integration REST API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors). Integration REST API runs as scheduled Azure Function App. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Permission to register an application in Microsoft Entra ID**: Sufficient permissions to register an application with your Microsoft Entra tenant are required. +- **Permission to assign a role to the registered application**: Permission to assign the Monitoring Metrics Publisher role to the registered application in Microsoft Entra ID is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** The ESET Protect Platform data connector uses Azure Functions to connect to the ESET Protect Platform via Eset Connect API to pull detections logs into Microsoft Sentinel. This process might result in additional data ingestion costs. See details on the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/). + +>**NOTE:** The newest version of the ESET PROTECT Platform and Microsoft Sentinel integration pulls not only detections logs but also newly created incidents. If your integration was set up before 20.06.2025, please follow [these steps](https://help.eset.com/eset_connect/en-US/update_ms_sentinel_integration.html) to update it. + +**1. 
Step 1 - Create an API user** + +Use this [instruction](https://help.eset.com/eset_connect/en-US/create_api_user_account.html) to create an ESET Connect API User account with **Login** and **Password**. + +**2. Step 2 - Create a registered application** + +Create a Microsoft Entra ID registered application by following the steps in the [Register a new application instruction.](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app) + +**3. Step 3 - Deploy the ESET Protect Platform data connector using the Azure Resource Manager (ARM) template** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-EsetProtectionPlatform-azuredeploy) + +2. Select the name of the **Log Analytics workspace** associated with your Microsoft Sentinel. Select the same **Resource Group** as the Resource Group of the Log Analytics workspace. + +3. Type the parameters of the registered application in Microsoft Entra ID: **Azure Client ID**, **Azure Client Secret**, **Azure Tenant ID**, **Object ID**. You can find the **Object ID** on Azure Portal by following this path +> Microsoft Entra ID -> Manage (on the left-side menu) -> Enterprise applications -> Object ID column (the value next to your registered application name). + +4. Provide the ESET Connect API user account **Login** and **Password** obtained in **Step 1**. + +5. Select one or more ESET products (ESET PROTECT, ESET Inspect, ESET Cloud Office Security) from which detections are retrieved. 
+ | | | |--------------------------|---| | **Tables Ingested** | `IntegrationTableIncidents_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/eset-security-management-center.md b/Tools/Solutions Analyzer/connector-docs/solutions/eset-security-management-center.md index 3d13b088eeb..64acae4208a 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/eset-security-management-center.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/eset-security-management-center.md @@ -21,6 +21,132 @@ This solution provides **1 data connector(s)**. Connector for [Eset SMC](https://help.eset.com/esmc_admin/72/en-US/) threat events, audit logs, firewall events and web sites filter. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Access to Eset SMC console**: Permissions to configure log export + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. 
+ - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure rsyslog to accept logs from your Eset SMC IP address. + +``` +sudo -i + +# Set ESET SMC source IP address +export ESETIP={Enter your IP address} + +# Create rsyslog configuration file +cat > /etc/rsyslog.d/80-remote.conf << EOF +\$ModLoad imudp +\$UDPServerRun 514 +\$ModLoad imtcp +\$InputTCPServerRun 514 +\$AllowedSender TCP, 127.0.0.1, $ESETIP +\$AllowedSender UDP, 127.0.0.1, $ESETIP +user.=alert;user.=crit;user.=debug;user.=emerg;user.=err;user.=info;user.=notice;user.=warning @127.0.0.1:25224 +EOF + +# Restart rsyslog +systemctl restart rsyslog``` + +**3. Configure OMS agent to pass Eset SMC data in API format** + +In order to easily recognize Eset data we will push it to separate table and parse at agent so query in Azure Sentinel is easier and fast. To make it simple we will just modify ```match oms.**``` section to send data as API objects by changing type to out_oms_api. Modify file on /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/conf/omsagent.conf. Full ```match oms.**``` section looks like this: + +``` + + type out_oms_api + log_level info + num_threads 5 + run_in_background false + + omsadmin_conf_path /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/conf/omsadmin.conf + cert_path /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/certs/oms.crt + key_path /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/certs/oms.key + + buffer_chunk_limit 15m + buffer_type file + buffer_path /var/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/state/out_oms_common*.buffer + + buffer_queue_limit 10 + buffer_queue_full_action drop_oldest_chunk + flush_interval 20s + retry_limit 10 + retry_wait 30s + max_retry_wait 9m + +``` + +**4. 
Change OMS agent configuration to catch tag oms.api.eset and parse structured data** + +Modify file /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/conf/omsagent.d/syslog.conf +``` + + type syslog + port 25224 + bind 127.0.0.1 + protocol_type udp + tag oms.api.eset + + + + @type parser + key_name message + format /(?.*?{.*})/ + + + + @type parser + key_name message + format json + +``` + +**5. Disable automatic configuration and restart agent** + +```bash +# Disable changes to configuration files from Portal +sudo su omsagent -c 'python /opt/microsoft/omsconfig/Scripts/OMS_MetaConfigHelper.py --disable' + +# Restart agent +sudo /opt/microsoft/omsagent/bin/service_control restart + +# Check agent logs +tail -f /var/opt/microsoft/omsagent/log/omsagent.log +``` + +**6. Configure Eset SMC to send logs to connector** + +Configure Eset Logs using BSD style and JSON format. +- Go to Syslog server configuration as described in [Eset documentation](https://help.eset.com/esmc_admin/72/en-US/admin_server_settings.html?admin_server_settings_syslog.html) and configure Host (your connector), Format BSD, Transport TCP +- Go to Logging section as described in [Eset documentation](https://help.eset.com/esmc_admin/72/en-US/admin_server_settings.html?admin_server_settings_export_to_syslog.html) and enable JSON + | | | |--------------------------|---| | **Tables Ingested** | `eset_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/esetprotect.md b/Tools/Solutions Analyzer/connector-docs/solutions/esetprotect.md index 1a95034f1c1..381ba09fe18 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/esetprotect.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/esetprotect.md @@ -21,6 +21,54 @@ This solution provides **1 data connector(s)**. This connector gathers all events generated by ESET software through the central management solution ESET PROTECT (formerly ESET Security Management Center). 
This includes Anti-Virus detections, Firewall detections but also more advanced EDR detections. For a complete list of events please refer to [the documentation](https://help.eset.com/protect_admin/latest/en-US/events-exported-to-json-format.html). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ESETPROTECT and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESETPROTECT/Parsers/ESETPROTECT.txt).The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. 
Select **Apply below configuration to my machines** and select the facilities and severities. The default ESET PROTECT facility is **user**. +3. Click **Save**. +- **Open Syslog settings** + +**3. Configure ESET PROTECT** + +Configure ESET PROTECT to send all events through Syslog. + +1. Follow [these instructions](https://help.eset.com/protect_admin/latest/en-US/admin_server_settings_syslog.html) to configure syslog output. Make sure to select **BSD** as the format and **TCP** as the transport. + +2. Follow [these instructions](https://help.eset.com/protect_admin/latest/en-US/admin_server_settings_export_to_syslog.html) to export all logs to syslog. Select **JSON** as the output format. + +Note:- Refer to the [documentation](https://learn.microsoft.com/en-us/azure/sentinel/connect-log-forwarder?tabs=rsyslog#security-considerations) for setting up the log forwarder for both local and cloud storage. +- **Open Syslog settings** + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/exabeam-advanced-analytics.md b/Tools/Solutions Analyzer/connector-docs/solutions/exabeam-advanced-analytics.md index 0d01054a7f0..ced9b8aa1f9 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/exabeam-advanced-analytics.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/exabeam-advanced-analytics.md @@ -21,6 +21,57 @@ This solution provides **1 data connector(s)**. The [Exabeam Advanced Analytics](https://www.exabeam.com/ueba/advanced-analytics-and-mitre-detect-and-stop-threats/) data connector provides the capability to ingest Exabeam Advanced Analytics events into Microsoft Sentinel. Refer to [Exabeam Advanced Analytics documentation](https://docs.exabeam.com/) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Exabeam Advanced Analytics and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Exabeam%20Advanced%20Analytics/Parsers/ExabeamEvent.txt), on the second line of the query, enter the hostname(s) of your Exabeam Advanced Analytics device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +>**NOTE:** This data connector has been developed using Exabeam Advanced Analytics i54 (Syslog) + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the server where the Exabeam Advanced Analytic logs are generated or forwarded. + +> Logs from Exabeam Advanced Analytic deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. 
+ - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +**3. Configure Exabeam event forwarding to Syslog** + +[Follow these instructions](https://docs.exabeam.com/en/advanced-analytics/i56/advanced-analytics-administration-guide/125351-advanced-analytics.html#UUID-7ce5ff9d-56aa-93f0-65de-c5255b682a08) to send Exabeam Advanced Analytics activity log data via syslog. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/extrahop-reveal(x).md b/Tools/Solutions Analyzer/connector-docs/solutions/extrahop-reveal(x).md index b009d38815e..691d586abe0 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/extrahop-reveal(x).md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/extrahop-reveal(x).md @@ -25,6 +25,64 @@ This solution provides **2 data connector(s)**. The ExtraHop Reveal(x) data connector enables you to easily connect your Reveal(x) system with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This integration gives you the ability to gain insight into your organization's network and improve your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward ExtraHop Networks logs to Syslog agent** + + 1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine IP address. +2. Follow the directions to install the [ExtraHop Detection SIEM Connector bundle](https://aka.ms/asi-syslog-extrahop-forwarding) on your Reveal(x) system. The SIEM Connector is required for this integration. +3. Enable the trigger for **ExtraHop Detection SIEM Connector - CEF** +4. Update the trigger with the ODS syslog targets you created  +5. The Reveal(x) system formats syslog messages in Common Event Format (CEF) and then sends data to Microsoft Sentinel. + + **Step C. 
Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:** `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/extrahop.md b/Tools/Solutions Analyzer/connector-docs/solutions/extrahop.md index 3c0a240d660..22d7176ccee 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/extrahop.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/extrahop.md @@ -22,6 +22,127 @@ This solution provides **1 data connector(s)**. The [ExtraHop](https://extrahop.com/) Detections Data Connector enables you to import detection data from ExtraHop RevealX to Microsoft Sentinel through webhook payloads. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **ExtraHop RevealX permissions**: The following is required on your ExtraHop RevealX system: + 1. Your RevealX system must be running firmware version 9.9.2 or later. + 2. Your RevealX system must be connected to ExtraHop Cloud Services. + 3. Your user account must have System Administration privileges on RevealX 360 or Full Write privileges on RevealX Enterprise. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the ExtraHop in which logs are pushed via ExtraHop webhook and it will ingest logs into Microsoft Sentinel. Furthermore, the connector will fetch the ingested data from the custom logs table and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. 
+ +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias **ExtraHopDetections** and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop/Parsers/ExtraHopDetections.yaml). The function usually takes 10-15 minutes to activate after solution installation/update. + +**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the ExtraHop Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Complete the following steps for automated deployment of the ExtraHop Detections Data Connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ExtraHop-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the values for the following parameters: + + a. **Function Name** - Enter the Function Name you want. + + b. **Workspace ID** - Enter the Workspace ID of the log analytics Workspace. + + c. **Workspace Key** - Enter the Workspace Key of the log analytics Workspace. + + d. **Detections Table Name** - Enter the name of the table used to store ExtraHop detection data. + + e. 
**LogLevel** - Select Debug, Info, Error, or Warning for the log level or log severity value. + + f. **AppInsightsWorkspaceResourceID** - Enter the value of the 'Log Analytic Workspace-->Properties-->Resource ID' property. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Complete the following steps to manually deploy the ExtraHop Detections Data Connector with Azure Functions (Deployment via Visual Studio Code). + +**5. 1) Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-ExtraHop-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ExtraHopXXXXX). + + e. **Select a runtime:** Choose Python 3.11 or above. + + f. Select a location for new resources. 
For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**6. 2) Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with the following respective values (case-sensitive): + + a. **Function Name** - Enter the Function Name you want. + + b. **Workspace ID** - Enter the Workspace ID of the log analytics Workspace. + + c. **Workspace Key** - Enter the Workspace Key of the log analytics Workspace. + + d. **Detections Table Name** - Enter the name of the table used to store ExtraHop detection data. + + e. **LogLevel** - Select Debug, Info, Error, or Warning for the log level or log severity value. + + f. **logAnalyticsUri (optional)** - Configure this option to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + +**STEP 2 - Post Deployment** + +**8. 1) Get the Function App endpoint** + +1. Go to the Azure function overview page and click the **"Functions"** tab. +2. Click on the function called **"ExtraHopHttpStarter"**. +3. Go to **"GetFunctionurl"** and copy the function url available under **"default (Function key)"**. +4. Replace **{functionname}** with **"ExtraHopDetectionsOrchestrator"** in copied function url. + +**9. 
2) Configure a connection to Microsoft Sentinel and specify webhook payload criteria from RevealX** + +From your ExtraHop system, configure the Microsoft Sentinel integration to establish a connection between Microsoft Sentinel and ExtraHop RevealX and to create detection notification rules that will send webhook data to Microsoft Sentinel. For detailed instructions, refer to [Integrate ExtraHop RevealX with Microsoft Sentinel SIEM](https://docs.extrahop.com/current/integrations-microsoft-sentinel-siem/). + +*After notification rules have been configured and Microsoft Sentinel is receiving webhook data, the Function App is triggered and you can view ExtraHop detections from the Log Analytics workspace table named "ExtraHop_Detections_CL".* + | | | |--------------------------|---| | **Tables Ingested** | `ExtraHop_Detections_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/f5-big-ip.md b/Tools/Solutions Analyzer/connector-docs/solutions/f5-big-ip.md index 3d25518318b..cd5ff1f5ddb 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/f5-big-ip.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/f5-big-ip.md @@ -21,6 +21,24 @@ This solution provides **1 data connector(s)**. The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure and connect F5 BIGIP** + +To connect your F5 BIGIP, you have to post a JSON declaration to the system’s API endpoint. For instructions on how to do this, see [Integrating the F5 BIGIP with Microsoft Sentinel](https://aka.ms/F5BigIp-Integrate). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `F5Telemetry_ASM_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/f5-networks.md b/Tools/Solutions Analyzer/connector-docs/solutions/f5-networks.md index 29725118e5a..fd65ef868f8 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/f5-networks.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/f5-networks.md @@ -25,6 +25,69 @@ This solution provides **2 data connector(s)**. The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Configure F5 to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. + +Go to [F5 Configuring Application Security Event Logging](https://aka.ms/asi-syslog-f5-forwarding), follow the instructions to set up remote logging, using the following guidelines: + +1. Set the **Remote storage type** to CEF. +2. Set the **Protocol setting** to UDP. +3. Set the **IP address** to the Syslog server IP address. +4. Set the **port number** to 514, or the port your agent uses. +5. Set the **facility** to the one that you configured in the Syslog agent (by default, the agent sets this to local4). +6. You can set the **Maximum Query String Size** to be the same as you configured. + + **Step C. 
Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/feedly.md b/Tools/Solutions Analyzer/connector-docs/solutions/feedly.md index 8a03fe7cf68..003455dc8a3 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/feedly.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/feedly.md @@ -21,6 +21,127 @@ This solution provides **1 data connector(s)**. This connector allows you to ingest IoCs from Feedly. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Azure AD Application Registration**: An Azure AD App Registration with client credentials and permissions to write to the Data Collection Rule. The application must be granted 'Monitoring Metrics Publisher' role on the DCR. +- **Data Collection Endpoint and Rule**: A Data Collection Endpoint (DCE) and Data Collection Rule (DCR) must be created before deploying this connector. [See the documentation to learn more](https://learn.microsoft.com/azure/azure-monitor/logs/custom-logs-migrate). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions and the Logs Ingestion API to pull IoCs from Feedly into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +**1. Step 1 - Prepare Your Environment** + +The Feedly connector will automatically create: + +- **Custom Table**: `feedly_indicators_CL` with the required schema +- **Data Collection Endpoint (DCE)**: For ingesting data +- **Data Collection Rule (DCR)**: For processing and routing data + +No manual resource creation is required - everything will be created during deployment! + +For detailed instructions, see: [Migrate from HTTP Data Collector API to Logs Ingestion API](https://learn.microsoft.com/azure/azure-monitor/logs/custom-logs-migrate) + +**2. Step 2 - Deploy the Connector** + +The ARM template will automatically: + +1. Create a managed identity for the Azure Function +2. 
Assign the **Monitoring Metrics Publisher** role to the Function App on the DCR +3. Configure all necessary permissions for data ingestion + +No manual role assignments are required - everything is handled automatically during deployment! + +**3. Step 3 - Get your Feedly API token** + +Go to https://feedly.com/i/team/api and generate a new API token for the connector. + +**4. (Optional Step) Securely store credentials in Azure Key Vault** + +Azure Key Vault provides a secure mechanism to store and retrieve secrets. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App by using the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema. + +**5. Step 4 - Deploy the connector** + +Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function + +>**IMPORTANT:** Before deploying, gather the following information: +- Feedly API Token and Stream IDs + +All Azure Monitor resources (DCE, DCR, custom table, and role assignments) will be created automatically during deployment. +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Feedly connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Feedly-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the required parameters: + - **TableName**: Name for the custom table (default: `feedly_indicators_CL`) + - **FeedlyApiKey**: Your Feedly API token from Step 3 + - **FeedlyStreamIds**: Comma-separated list of Feedly stream IDs + - **DaysToBackfill**: Number of days to backfill (default: 7) + +>**Note**: If using Azure Key Vault secrets, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Feedly connector manually with Azure Functions (Deployment via Visual Studio Code). +**1. Deploy a Function App** + + **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/raw/refs/heads/master/Solutions/Feedly/Data%20Connectors/FeedlyAzureFunction.zip) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity Bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. FeedlyXXXX). + + e. **Select a runtime:** Choose Python 3.10. + + f. Select a location for new resources. 
For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + + **2. Configure the Function App** + + 1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **New application setting**. +3. Add each of the following application settings individually, with their respective values (case-sensitive): + - `DataCollectionEndpoint`: Will be populated automatically after DCE creation + - `DcrImmutableId`: Will be populated automatically after DCR creation + - `DcrStreamName`: `feedly_indicators_CL` + - `FeedlyApiKey`: Your Feedly API token + - `FeedlyStreamIds`: Comma-separated Feedly stream IDs + - `DaysToBackfill`: Number of days to backfill (e.g., 7) + +**Note**: The Function App uses managed identity for authentication to Azure Monitor, so no Azure AD credentials are needed. + +>**Note**: Use Azure Key Vault references for sensitive values: `@Microsoft.KeyVault(SecretUri={Security Identifier})` + +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `feedly_indicators_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/fireeye-network-security.md b/Tools/Solutions Analyzer/connector-docs/solutions/fireeye-network-security.md index fc5673c4675..35bb0aa95ac 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/fireeye-network-security.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/fireeye-network-security.md @@ -25,6 +25,79 @@ This solution provides **2 data connector(s)**. 
The [FireEye Network Security (NX)](https://www.fireeye.com/products/network-security.html) data connector provides the capability to ingest FireEye Network Security logs into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**FireEyeNXEvent**](https://aka.ms/sentinel-FireEyeNX-parser) which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. 
Configure FireEye NX to send logs using CEF** + + Complete the following steps to send data using CEF: + +2.1. Log into the FireEye appliance with an administrator account + +2.2. Click **Settings** + +2.3. Click **Notifications** + +Click **rsyslog** + +2.4. Check the **Event type** check box + +2.5. Make sure Rsyslog settings are: + +- Default format: CEF + +- Default delivery: Per event + +- Default send as: Alert + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/flare.md b/Tools/Solutions Analyzer/connector-docs/solutions/flare.md index 1b5bd2d85a1..fa60a1e3749 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/flare.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/flare.md @@ -21,6 +21,31 @@ This solution provides **1 data connector(s)**. [Flare](https://flare.systems/platform/) connector allows you to receive data and intelligence from Flare on Microsoft Sentinel. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Required Flare permissions**: only Flare organization administrators may configure the Microsoft Sentinel integration. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Creating an Alert Channel for Microsoft Sentinel** +As an organization administrator, authenticate on [Flare](https://app.flare.systems) and access the [team page](https://app.flare.systems#/team) to create a new alert channel. + + Click on 'Create a new alert channel' and select 'Microsoft Sentinel'. Enter your Shared Key and Workspace ID. Save the Alert Channel. + For more help and details, see our [Azure configuration documentation](https://docs.microsoft.com/azure/sentinel/connect-data-sources). + - **Workspace ID**: `{0}` + - **Primary key**: `{0}` +**2. Associating your alert channel to an alert feed** +At this point, you may configure alerts to be sent to Microsoft Sentinel the same way that you would configure regular email alerts. + + For a more detailed guide, refer to the Flare documentation. 
+ | | | |--------------------------|---| | **Tables Ingested** | `Firework_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-casb.md b/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-casb.md index afa6e800801..71e8f40a1b8 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-casb.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-casb.md @@ -25,6 +25,69 @@ This solution provides **2 data connector(s)**. The Forcepoint CASB (Cloud Access Security Broker) Connector allows you to automatically export CASB logs and events into Microsoft Sentinel in real-time. This enriches visibility into user activities across locations and cloud applications, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. 
Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade. + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine. + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + +**3. 
Forcepoint integration installation guide** + +To complete the installation of this Forcepoint product integration, follow the guide linked below. + +[Installation Guide >](https://frcpnt.com/casb-sentinel) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-csg.md b/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-csg.md index bd44174e822..e6c8e9c1c2b 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-csg.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-csg.md @@ -25,6 +25,74 @@ This solution provides **2 data connector(s)**. Forcepoint Cloud Security Gateway is a converged cloud security service that provides visibility, control, and threat protection for users and data, wherever they are. For more information visit: https://www.forcepoint.com/product/cloud-security-gateway +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine. + + **Step B. Implementation options** + + The integration is made available with two implementations options. +**1. Docker Implementation** + + Leverages docker images where the integration component is already installed with all necessary dependencies. + +Follow the instructions provided in the Integration Guide linked below. + +[Integration Guide >](https://frcpnt.com/csg-sentinel) + + **2. Traditional Implementation** + + Requires the manual deployment of the integration component inside a clean Linux machine. + +Follow the instructions provided in the Integration Guide linked below. + +[Integration Guide >](https://frcpnt.com/csg-sentinel) + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF). + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-dlp.md b/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-dlp.md index e8b2df366bb..73c8444563b 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-dlp.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-dlp.md @@ -21,6 +21,22 @@ This solution provides **1 data connector(s)**. The Forcepoint DLP (Data Loss Prevention) connector allows you to automatically export DLP incident data from Forcepoint DLP into Microsoft Sentinel in real-time. This enriches visibility into user activities and data loss incidents, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Follow step by step instructions in the [Forcepoint DLP documentation for Microsoft Sentinel](https://frcpnt.com/dlp-sentinel) to configure this connector. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `ForcepointDLPEvents_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-ngfw.md b/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-ngfw.md index 8cdd1e2760d..ef8553d29a2 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-ngfw.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/forcepoint-ngfw.md @@ -25,6 +25,69 @@ This solution provides **2 data connector(s)**. The Forcepoint NGFW (Next Generation Firewall) connector allows you to automatically export user-defined Forcepoint NGFW logs into Microsoft Sentinel in real-time. This enriches visibility into user activities recorded by NGFW, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. 
+ +> Notice that the data from all regions will be stored in the selected workspace +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine. + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + +**3. Forcepoint integration installation guide** + +To complete the installation of this Forcepoint product integration, follow the guide linked below. + +[Installation Guide >](https://frcpnt.com/ngfw-sentinel) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/forescout-(legacy).md b/Tools/Solutions Analyzer/connector-docs/solutions/forescout-(legacy).md index d3567ef1e4a..b698f7a0a63 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/forescout-(legacy).md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/forescout-(legacy).md @@ -21,6 +21,63 @@ This solution provides **1 data connector(s)**. The [Forescout](https://www.forescout.com/) data connector provides the capability to ingest [Forescout events](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.How-to-Work-with-the-Syslog-Plugin.html) into Microsoft Sentinel. Refer to [Forescout documentation](https://docs.forescout.com/bundle/syslog-msg-3-6-tn/page/syslog-msg-3-6-tn.About-Syslog-Messages-in-Forescout.html) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ForescoutEvent**](https://aka.ms/sentinel-forescout-parser) which is deployed with the Microsoft Sentinel Solution. 
+ +>**NOTE:** This data connector has been developed using Forescout Syslog Plugin version: v3.6 + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server where the Forescout logs are forwarded. + +> Logs from Forescout Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure Forescout event forwarding** + +Follow the configuration steps below to get Forescout logs into Microsoft Sentinel. +1. [Select an Appliance to Configure.](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.Select-an-Appliance-to-Configure.html) +2. [Follow these instructions](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.Send-Events-To-Tab.html#pID0E0CE0HA) to forward alerts from the Forescout platform to a syslog server. 
+3. [Configure](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.Syslog-Triggers.html) the settings in the Syslog Triggers tab. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/forescout-eyeinspect-for-ot-security.md b/Tools/Solutions Analyzer/connector-docs/solutions/forescout-eyeinspect-for-ot-security.md index 497b19f0386..1ec0a79944f 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/forescout-eyeinspect-for-ot-security.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/forescout-eyeinspect-for-ot-security.md @@ -21,6 +21,24 @@ This solution provides **1 data connector(s)**. Forescout eyeInspect for OT Security connector allows you to connect Asset/Alert information from Forescout eyeInspect OT platform with Microsoft Sentinel, to view and analyze data using Log Analytics Tables and Workbooks. This gives you more insight into OT organization network and improves security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Forescout eyeInspect OT Microsoft Sentinel Integration** + +Instructions on how to configure Forescout eyeInspect Microsoft Sentinel Integration are provided at Forescout eyeInspect Documentation Portal +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `ForescoutOtAlert_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/forescouthostpropertymonitor.md b/Tools/Solutions Analyzer/connector-docs/solutions/forescouthostpropertymonitor.md index 6f3a6e20ba0..3e8c95b3a75 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/forescouthostpropertymonitor.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/forescouthostpropertymonitor.md @@ -21,6 +21,25 @@ This solution provides **1 data connector(s)**. The Forescout Host Property Monitor connector allows you to connect host/policy/compliance properties from Forescout platform with Microsoft Sentinel, to view, create custom incidents, and improve investigation. This gives you more insight into your organization network and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Forescout Plugin requirement**: Please make sure Forescout Microsoft Sentinel plugin is running on Forescout platform + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Instructions on how to configure Forescout Microsoft Sentinel plugin are provided at Forescout Documentation Portal (https://docs.forescout.com/bundle/microsoft-sentinel-module-v2-0-0-h) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `ForescoutComplianceStatus_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/forgerock-common-audit-for-cef.md b/Tools/Solutions Analyzer/connector-docs/solutions/forgerock-common-audit-for-cef.md index 3e2d9eccf96..4de3c04a0e1 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/forgerock-common-audit-for-cef.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/forgerock-common-audit-for-cef.md @@ -21,6 +21,63 @@ This solution provides **1 data connector(s)**. The ForgeRock Identity Platform provides a single common auditing framework. Extract and aggregate log data across the entire platform with common audit (CAUD) event handlers and unique IDs so that it can be tracked holistically. Open and extensible, you can leverage audit logging and reporting capabilities for integration with Microsoft Sentinel via this CAUD for CEF connector. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configuration for the ForgeRock Common Audit (CAUD) for Microsoft Sentinel** + +In ForgeRock, install and configure this Common Audit (CAUD) for Microsoft Sentinel per the documentation at https://github.com/javaservlets/SentinelAuditEventHandler. Next, in Azure, follow the below CEF steps. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. 
+ - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/fortinet-fortigate-next-generation-firewall-connector-for-microsoft-sentinel.md b/Tools/Solutions Analyzer/connector-docs/solutions/fortinet-fortigate-next-generation-firewall-connector-for-microsoft-sentinel.md index c4f57e07011..e0fe0908759 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/fortinet-fortigate-next-generation-firewall-connector-for-microsoft-sentinel.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/fortinet-fortigate-next-generation-firewall-connector-for-microsoft-sentinel.md @@ -25,6 +25,77 @@ This solution provides **2 data connector(s)**. The Fortinet firewall connector allows you to easily connect your Fortinet logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. 
Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Fortinet logs to Syslog agent** + + Set your Fortinet to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine’s IP address. + + +Copy the CLI commands below and: +- Replace "server <ip address>" with the Syslog agent's IP address. +- Set the "<facility_name>" to use the facility you configured in the Syslog agent (by default, the agent sets this to local4). +- Set the Syslog port to 514, the port your agent uses. +- To enable CEF format in early FortiOS versions, you may need to run the command "set csv disable". + +For more information, go to the [Fortinet Document Library](https://aka.ms/asi-syslog-fortinet-fortinetdocumentlibrary), choose your version, and use the "Handbook" and "Log Message Reference" PDFs. + +[Learn more >](https://aka.ms/CEF-Fortinet) + - **Set up the connection using the CLI to run the following commands:**: `config log syslogd setting + set status enable +set format cef +set port 514 +set server +end` + + **Step C. 
Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/fortinet-fortindr-cloud.md b/Tools/Solutions Analyzer/connector-docs/solutions/fortinet-fortindr-cloud.md index 94ad605500c..ca2fece3ddd 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/fortinet-fortindr-cloud.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/fortinet-fortindr-cloud.md @@ -21,6 +21,50 @@ This solution provides **1 data connector(s)**. The Fortinet FortiNDR Cloud data connector provides the capability to ingest [Fortinet FortiNDR Cloud](https://docs.fortinet.com/product/fortindr-cloud) data into Microsoft Sentinel using the FortiNDR Cloud API +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **MetaStream Credentials**: **AWS Access Key Id**, **AWS Secret Access Key**, **FortiNDR Cloud Account Code** are required to retrieve event data. +- **API Credentials**: **FortiNDR Cloud API Token**, **FortiNDR Cloud Account UUID** are required to retrieve detection data. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the FortiNDR Cloud API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Fortinet%20FortiNDR%20Cloud/Parsers/Fortinet_FortiNDR_Cloud.md) to create the Kusto function alias **Fortinet_FortiNDR_Cloud**. 
+ +**STEP 1 - Configuration steps for the Fortinet FortiNDR Cloud Logs Collection** + +The provider should provide or link to detailed steps to configure the 'PROVIDER NAME APPLICATION NAME' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Fortinet FortiNDR Cloud connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the FortiNDR Cloud API credentials (available in FortiNDR Cloud account management), readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Fortinet FortiNDR Cloud connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-FortinetFortiNDR-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location** (make sure to use the same location as your Resource Group, and that the location supports Flex Consumption). +3. Enter the **Workspace ID**, **Workspace Key**, **AwsAccessKeyId**, **AwsSecretAccessKey**, and/or Other required fields. +4. Click **Create** to deploy.
+ | | | |--------------------------|---| | **Tables Ingested** | `FncEventsDetections_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/fortinet-fortiweb-cloud-waf-as-a-service-connector-for-microsoft-sentinel.md b/Tools/Solutions Analyzer/connector-docs/solutions/fortinet-fortiweb-cloud-waf-as-a-service-connector-for-microsoft-sentinel.md index 571f6061989..c7eef2d1227 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/fortinet-fortiweb-cloud-waf-as-a-service-connector-for-microsoft-sentinel.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/fortinet-fortiweb-cloud-waf-as-a-service-connector-for-microsoft-sentinel.md @@ -25,6 +25,60 @@ This solution provides **2 data connector(s)**. The [fortiweb](https://www.fortinet.com/products/web-application-firewall/fortiweb) data connector provides the capability to ingest Threat Analytics and events into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/garrison-ultra.md b/Tools/Solutions Analyzer/connector-docs/solutions/garrison-ultra.md index 8fe9f11fc4d..4fab3d6116a 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/garrison-ultra.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/garrison-ultra.md @@ -21,6 +21,31 @@ This solution provides **1 data connector(s)**. The [Garrison ULTRA](https://www.garrison.com/en/garrison-ultra-cloud-platform) Remote Logs connector allows you to ingest Garrison ULTRA Remote Logs into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Garrison ULTRA**: To use this data connector you must have an active [Garrison ULTRA](https://www.garrison.com/en/garrison-ultra-cloud-platform) license. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Deployment - Azure Resource Manager (ARM) Template** + +These steps outline the automated deployment of the Garrison ULTRA Remote Logs data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below.
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Frefs%2Fheads%2Fmaster%2FSolutions%2FGarrison%2520ULTRA%2FData%2520Connectors%2FGarrisonULTRARemoteLogs%2Fazuredeploy_DataCollectionResources.json) +2. Provide the required details such as Resource Group, Microsoft Sentinel Workspace and ingestion configurations +> **NOTE:** It is recommended to create a new Resource Group for deployment of these resources. +3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +4. Click **Purchase** to deploy. + | | | |--------------------------|---| | **Tables Ingested** | `Garrison_ULTRARemoteLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/gigamon-connector.md b/Tools/Solutions Analyzer/connector-docs/solutions/gigamon-connector.md index ff655240859..bc941a6d2d5 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/gigamon-connector.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/gigamon-connector.md @@ -20,6 +20,26 @@ This solution provides **1 data connector(s)**. Use this data connector to integrate with Gigamon Application Metadata Exporter (AMX) and get data sent directly to Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Gigamon Data Connector** + +1. 
Application Metadata Exporter (AMX) application converts the output from the Application Metadata Intelligence (AMI) in CEF format into JSON format and sends it to the cloud tools and Kafka. + 2. The AMX application can be deployed only on a V Series Node and can be connected to Application Metadata Intelligence running on a physical node or a virtual machine. + 3. The AMX application and the AMI are managed by GigaVUE-FM. This application is supported on VMware ESXi, VMware NSX-T, AWS and Azure. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `Gigamon_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/github.md b/Tools/Solutions Analyzer/connector-docs/solutions/github.md index 81fd244f2d1..658e19ffd35 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/github.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/github.md @@ -33,6 +33,83 @@ The [GitHub](https://www.github.com) webhook data connector provides the capabil **Note:** If you are intended to ingest Github Audit logs, Please refer to GitHub Enterprise Audit Log Connector from "**Data Connectors**" gallery. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector has been built on http trigger based Azure Function. And it provides an endpoint to which github will be connected through its webhook capability and posts the subscribed events into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Github Webhook connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the GitHub data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. 
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GitHubwebhookAPI-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region and deploy. +3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +4. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the GitHub webhook data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the [Azure Function App](https://aka.ms/sentinel-GitHubWebhookAPI-functionapp) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +5. Once all application settings have been entered, click **Save**. 
+ +**Post Deployment steps** + +**STEP 1 - To get the Azure Function url** + + 1. Go to Azure function Overview page and Click on "Functions" in the left blade. + 2. Click on the function called "GithubwebhookConnector". + 3. Go to "GetFunctionurl" and copy the function url. + + **STEP 2 - Configure Webhook to Github Organization** + + 1. Go to [GitHub](https://www.github.com) and open your account and click on "Your Organizations." + 2. Click on Settings. + 3. Click on "Webhooks" and enter the function app url which was copied from above STEP 1 under payload URL textbox. + 4. Choose content type as "application/json". + 5. Subscribe for events and Click on "Add Webhook" + +*Now we are done with the github Webhook configuration. Once the GitHub events are triggered and after the delay of 20 to 30 mins (As there will be a delay for LogAnalytics to spin up the resources for the first time), you should be able to see all the transactional events from the Github into LogAnalytics workspace table called "githubscanaudit_CL".* + + For more details, Click [here](https://aka.ms/sentinel-gitHubwebhooksteps) + | | | |--------------------------|---| | **Tables Ingested** | `githubscanaudit_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/gitlab.md b/Tools/Solutions Analyzer/connector-docs/solutions/gitlab.md index 6903fb36a55..63370f2a623 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/gitlab.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/gitlab.md @@ -22,6 +22,45 @@ This solution provides **1 data connector(s)**. The [GitLab](https://about.gitlab.com/solutions/devops-platform/) connector allows you to easily connect your GitLab (GitLab Enterprise Edition - Standalone) logs with Microsoft Sentinel. This gives you more security insight into your organization's DevOps pipelines. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configuration** + +>This data connector depends on three parsers based on a Kusto Function to work as expected [**GitLab Access Logs**](https://aka.ms/sentinel-GitLabAccess-parser), [**GitLab Audit Logs**](https://aka.ms/sentinel-GitLabAudit-parser) and [**GitLab Application Logs**](https://aka.ms/sentinel-GitLabApp-parser) which are deployed with the Microsoft Sentinel Solution. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. 
+- **Open Syslog settings** + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/google-apigee.md b/Tools/Solutions Analyzer/connector-docs/solutions/google-apigee.md index ee2d7dcfe17..0442f6ae111 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/google-apigee.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/google-apigee.md @@ -25,6 +25,53 @@ This solution provides **2 data connector(s)**. The Google ApigeeX data connector provides the capability to ingest Audit logs into Microsoft Sentinel using the Google Apigee API. Refer to [Google Apigee API](https://cloud.google.com/apigee/docs/reference/apis/apigee/rest/?apix=true) documentation for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Google ApigeeX to Microsoft Sentinel** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/ApigeeXReadme.md) for log setup and authentication setup tutorial. 
+ Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPApigeeLogSetup) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/ApigeeXReadme.md) for log setup and authentication setup tutorial. + Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPApigeeLogSetup) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable ApigeeX logs + In the Google Cloud Console, enable Apigee API, if not enabled previously, and save the changes. +#### 3. Connect new collectors + To enable ApigeeX Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. 
+ +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `GCPApigee` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-audit-logs.md b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-audit-logs.md index 22a30591bc1..276806acb72 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-audit-logs.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-audit-logs.md @@ -21,6 +21,44 @@ This solution provides **1 data connector(s)**. The Google Cloud Platform (GCP) audit logs, ingested from Microsoft Sentinel's connector, enables you to capture three types of audit logs: admin activity logs, data access logs, and access transparency logs. Google cloud audit logs record a trail that practitioners can use to monitor access and detect potential threats across Google Cloud Platform (GCP) resources. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. 
Set up your GCP environment + You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. + Terraform provides API for the IAM that creates the resources. [Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation). + +**Government Cloud:** +#### 1. Set up your GCP environment + You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. + Terraform provides API for the IAM that creates the resources. [Link to Gov Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov). +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Connect new collectors + To enable GCP Audit Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. 
+ +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `GCPAuditLogs` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-cloud-monitoring.md b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-cloud-monitoring.md index a65a933d542..8d9077751c0 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-cloud-monitoring.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-cloud-monitoring.md @@ -29,6 +29,99 @@ The Google Cloud Platform Cloud Monitoring data connector provides the capabilit

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **GCP service account**: GCP service account with permissions to read Cloud Monitoring metrics is required for GCP Monitoring API (required *Monitoring Viewer* role). Also json file with service account key is required. See the documentation to learn more about [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**GCP_MONITORING**](https://aka.ms/sentinel-GCPMonitorDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuring GCP and obtaining credentials** + +1. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with Monitoring Viewer role and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). + +2. Prepare the list of GCP projects to get metrics from. [Learn more about GCP projects](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy). + +3. Prepare the list of [GCP metric types](https://cloud.google.com/monitoring/api/metrics_gcp) + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GCPMonitorDataConnector-azuredeploy) +2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Google Cloud Platform Project Id List**, **Google Cloud Platform Metric Types List**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-GCPMonitorDataConnector-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. + + e. **Select a runtime:** Choose Python 3.11. + + f. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + GCP_PROJECT_ID + GCP_METRICS + GCP_CREDENTIALS_FILE_CONTENT + WORKSPACE_ID + SHARED_KEY + logAnalyticsUri (Optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `GCP_MONITORING_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-cloud-run.md b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-cloud-run.md index def77773f3b..b0eb5582e4d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-cloud-run.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-cloud-run.md @@ -21,6 +21,57 @@ This solution provides **1 data connector(s)**. The GCP Cloud Run data connector provides the capability to ingest Cloud Run request logs into Microsoft Sentinel using Pub/Sub. Refer the [Cloud Run Overview](https://cloud.google.com/run/docs/logging) for more details. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP Cloud Run to Microsoft Sentinel** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + + Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudRunLogsSetup) + & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run/Data%20Connectors/README.md) for log setup and authentication setup tutorial. 
+ + Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudRunLogsSetup) + & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable Cloud Run logs + In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes.Deploy or update your Cloud Run services with logging enabled. + + Reference Link: [Link to documentation](https://cloud.google.com/run/docs/setup) +#### 3. Connect new collectors + To enable GCP Cloud Run Request Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. 
+ | | | |--------------------------|---| | **Tables Ingested** | `GCPCloudRun` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-compute-engine.md b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-compute-engine.md index 623cb6a3771..ef4bc96446c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-compute-engine.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-compute-engine.md @@ -21,6 +21,53 @@ This solution provides **1 data connector(s)**. The Google Cloud Platform Compute Engine data connector provides the capability to ingest Compute Engine Audit logs into Microsoft Sentinel using the Google Cloud Compute Engine API. Refer to [Cloud Compute Engine API](https://cloud.google.com/compute/docs/reference/rest/v1) documentation for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP Compute Engine to Microsoft Sentinel** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine/Data%20Connectors/GCPComputeEngineReadme.md) for log setup and authentication setup tutorial. 
+ Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPComputeEngineLogsSetup/GCPComputeEngineLogSetup.tf) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine/Data%20Connectors/GCPComputeEngineReadme.md) for log setup and authentication setup tutorial. + Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPComputeEngineLogsSetup/GCPComputeEngineLogSetup.tf) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable Compute Engine logs + In the Google Cloud Console, enable Compute Engine API, if not enabled previously, and save the changes. +#### 3. Connect new collectors + To enable Compute Engine Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect. 
+**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `GCPComputeEngine` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-firewall-logs.md b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-firewall-logs.md index 4033b609efd..c671c2c35b6 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-firewall-logs.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-firewall-logs.md @@ -4,11 +4,8 @@ | | | |------------------------|-------| -| **Publisher** | Microsoft Corporation | -| **Support Tier** | Microsoft | -| **Support Link** | [https://support.microsoft.com](https://support.microsoft.com) | -| **Categories** | domains | -| **First Published** | 2024-11-03 | +| **Publisher** | | +| **Support Tier** | | | **Solution Folder** | [https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Firewall%20Logs](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Firewall%20Logs) | ## Data Connectors @@ -21,6 +18,50 @@ This solution provides **1 data connector(s)**. 
The Google Cloud Platform (GCP) firewall logs, enable you to capture network inbound and outbound activity to monitor access and detect potential threats across Google Cloud Platform (GCP) resources. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. Set up your GCP environment + You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. + Terraform provides API for the IAM that creates the resources. [Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation) + Connector tutorial: [Link to tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup) . + +**Government Cloud:** +#### 1. Set up your GCP environment + You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. + Terraform provides API for the IAM that creates the resources. [Link to Gov Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov) + Connector tutorial: [Link to tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). 
+- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable Firewall logs +In your GCP account, navigate to the Firewall section. Here, you can either create a new rule or edit an existing one that you want to monitor. Once you open the rule, switch the toggle button under the **Logs** section to **On**, and save the changes. + +For more information: [Link to documentation](https://cloud.google.com/firewall/docs/using-firewall-rules-logging?_gl=1*1no0nhk*_ga*NDMxNDIxODI3LjE3MjUyNjUzMzc.*_ga_WH2QY8WWF5*MTcyNTUyNzc4MS4xMS4xLjE3MjU1MjgxNTIuNDYuMC4w) +#### 3. Connect new collectors + To enable GCP Firewall Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. 
+ | | | |--------------------------|---| | **Tables Ingested** | `GCPFirewallLogs` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-load-balancer-logs.md b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-load-balancer-logs.md index abeed174048..faffe6e4e14 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-load-balancer-logs.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-load-balancer-logs.md @@ -21,6 +21,48 @@ This solution provides **1 data connector(s)**. Google Cloud Platform (GCP) Load Balancer logs provide detailed insights into network traffic, capturing both inbound and outbound activities. These logs are used for monitoring access patterns and identifying potential security threats across GCP resources. Additionally, these logs also include GCP Web Application Firewall (WAF) logs, enhancing the ability to detect and mitigate risks effectively. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. Set up your GCP environment + You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. + Terraform provides API for the IAM that creates the resources. [Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation). + +**Government Cloud:** +#### 1. 
Set up your GCP environment + You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. + Terraform provides API for the IAM that creates the resources. [Link to Gov Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov). +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable Load Balancer logs +In your GCP account, navigate to the Load Balancer section. In here you can navigate to [**Backend Service**] -> [**Edit**], once you are in the [**Backend Service**] on the [**Logging**] section **enable** the checkbox of [**Enable Logs**]. Once you open the rule, switch the toggle button under the **Logs** section to **On**, and save the changes. + +For more information: [Link to documentation](https://cloud.google.com/load-balancing/docs/https/https-logging-monitoring) +#### 3. Connect new collectors + To enable GCP Load Balancer Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. 
+ +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `GCPLoadBalancerLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-security-command-center.md b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-security-command-center.md index 49a57d4080b..2e4fb41f762 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-security-command-center.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-security-command-center.md @@ -21,6 +21,40 @@ This solution provides **1 data connector(s)**. The Google Cloud Platform (GCP) Security Command Center is a comprehensive security and risk management platform for Google Cloud, ingested from Sentinel's connector. It offers features such as asset inventory and discovery, vulnerability and threat detection, and risk mitigation and remediation to help you gain insight into your organization's security and data attack surface. This integration enables you to perform tasks related to findings and assets more effectively. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. Set up your GCP environment + You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. + Terraform provides API for the IAM that creates the resources. [Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation). +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Connect new collectors + To enable GCP SCC for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. 
+ +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `GoogleCloudSCC` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-vpc-flow-logs.md b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-vpc-flow-logs.md index 7c59d985e80..75ca3cf592c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-vpc-flow-logs.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/google-cloud-platform-vpc-flow-logs.md @@ -21,6 +21,52 @@ This solution provides **1 data connector(s)**. The Google Cloud Platform (GCP) VPC Flow Logs enable you to capture network traffic activity at the VPC level, allowing you to monitor access patterns, analyze network performance, and detect potential threats across GCP resources. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. Set up your GCP environment + You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription. + To configure this data connector, execute the following Terraform scripts: + 1. 
Setup Required Resources: [Configuration Guide](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPVPCFlowLogsSetup/readme.md) + 2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool. + +**Government Cloud:** +#### 1. Set up your GCP environment + You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription. + To configure this data connector, execute the following Terraform scripts: + 1. Setup Required Resources: [Configuration Guide](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPVPCFlowLogsSetup/readme.md) + 2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool. +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable VPC Flow Logs +In your GCP account, navigate to the VPC network section. Select the subnet you want to monitor and enable Flow Logs under the Logging section. + +For more information: [Google Cloud Documentation](https://cloud.google.com/vpc/docs/using-flow-logs) +#### 3. 
Connect new collectors + To enable GCP VPC Flow Logs for Microsoft Sentinel, click the Add new collector button, fill in the required information in the context pane, and click Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `GCPVPCFlow` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/google-kubernetes-engine.md b/Tools/Solutions Analyzer/connector-docs/solutions/google-kubernetes-engine.md index f26e738f0b6..90557f22c64 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/google-kubernetes-engine.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/google-kubernetes-engine.md @@ -21,6 +21,45 @@ This solution provides **1 data connector(s)**. The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. Set up your GCP environment +You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription. + +To configure this data connector, execute the following Terraform scripts: + +1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md) +2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool. +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable Kubernetes Engine Logging +In your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest—such as API server, scheduler, controller manager, HPA decision, and application logs—are enabled for effective monitoring and security analysis. +#### 3. Connect new collectors +To enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**. 
+**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `GKEAPIServer` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformcdn.md b/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformcdn.md index 92bfd0a05cd..ab40588889d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformcdn.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformcdn.md @@ -21,6 +21,57 @@ This solution provides **1 data connector(s)**. The Google Cloud Platform CDN data connector provides the capability to ingest Cloud CDN Audit logs and Cloud CDN Traffic logs into Microsoft Sentinel using the Compute Engine API. Refer the [Product overview](https://cloud.google.com/cdn/docs/overview) document for more details. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Connect GCP CDN to Microsoft Sentinel** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformCDN/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + + Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCDNLogsSetup) + & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformCDN/Data%20Connectors/README.md) for log setup and authentication setup tutorial. 
+ + Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCDNLogsSetup) + & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable CDN logs + In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes. Navigate to Cloud CDN section and click on Add origin to create backends as per link provided below. + + Reference Link: [Link to documentation](https://cloud.google.com/cdn/docs/using-cdn) +#### 3. Connect new collectors + To enable GCP Cloud CDN Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. 
+ | | | |--------------------------|---| | **Tables Ingested** | `GCPCDN` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformdns.md b/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformdns.md index d088f47d096..ca415b2b87d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformdns.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformdns.md @@ -25,6 +25,56 @@ This solution provides **2 data connector(s)**. The Google Cloud Platform DNS data connector provides the capability to ingest Cloud DNS Query logs and Cloud DNS Audit logs into Microsoft Sentinel using the Google Cloud DNS API. Refer to [Cloud DNS API](https://cloud.google.com/dns/docs/reference/rest/v1) documentation for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP DNS to Microsoft Sentinel** +>**NOTE:** If both Azure Function and CCP connector are running simultaneously, duplicate data is populated in the tables. +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS/Data%20Connectors/README.md) for log setup and authentication setup tutorial. 
+ Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPDNS_CCPLogsSetup) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPDNS_CCPLogsSetupGov) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable DNS logs + In the Google Cloud Console, navigate to Cloud DNS Section. Enable cloud logging if not enabled previously, and save the changes. Here, you can manage the existing zones, or create a new zone and create policies for the zone which you want to monitor. + +For more information: [Link to documentation](https://cloud.google.com/dns/docs/zones/zones-overview) +#### 3. 
Connect new collectors + To enable GCP DNS Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `GCPDNS` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformiam.md b/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformiam.md index 4c77149d7d1..f7bd5d1df28 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformiam.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformiam.md @@ -29,6 +29,100 @@ The Google Cloud Platform Identity and Access Management (IAM) data connector pr

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **GCP service account**: GCP service account with permissions to read logs is required for GCP Logging API. Also json file with service account key is required. See the documentation to learn more about [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions), [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**GCP_IAM**](https://aka.ms/sentinel-GCPIAMDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuring GCP and obtaining credentials** + +1. Make sure that Logging API is [enabled](https://cloud.google.com/apis/docs/getting-started#enabling_apis). + +2. (Optional) [Enable Data Access Audit logs](https://cloud.google.com/logging/docs/audit/configure-data-access#config-console-enable). + +3. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions) and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). + +4. Prepare the list of GCP resources (organizations, folders, projects) to get logs from. [Learn more about GCP resources](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy). + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. 
Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GCPIAMDataConnector-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Google Cloud Platform Resource Names**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-GCPIAMDataConnector-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. 
Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_NAMES + CREDENTIALS_FILE_CONTENT + WORKSPACE_ID + SHARED_KEY + logAnalyticsUri (Optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `GCP_IAM_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformids.md b/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformids.md index 4f4fd7eba0d..f3a2c75b656 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformids.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformids.md @@ -21,6 +21,57 @@ This solution provides **1 data connector(s)**. 
The Google Cloud Platform IDS data connector provides the capability to ingest Cloud IDS Traffic logs, Threat logs and Audit logs into Microsoft Sentinel using the Google Cloud IDS API. Refer to [Cloud IDS API](https://cloud.google.com/intrusion-detection-system/docs/audit-logging#google.cloud.ids.v1.IDS) documentation for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP Cloud IDS to Microsoft Sentinel** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIDS/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + + Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudIDSLogSetup) + & the Authentication set up script: [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. 
+ For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIDS/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + + Find the Log set up script: [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudIDSLogSetup) + & the Authentication set up script: [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable IDS logs + In the Google Cloud Console, enable Cloud IDS API, if not enabled previously. Create an IDS Endpoint and save the changes. + +For more information on how to create and configure an IDS endpoint: [Link to documentation](https://cloud.google.com/intrusion-detection-system/docs/configuring-ids) +#### 3. Connect new collectors + To enable GCP IDS Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. 
+ +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `GCPIDS` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformnat.md b/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformnat.md index 1baeddf7126..e7c5c71ac17 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformnat.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformnat.md @@ -21,6 +21,57 @@ This solution provides **1 data connector(s)**. The Google Cloud Platform NAT data connector provides the capability to ingest Cloud NAT Audit logs and Cloud NAT Traffic logs into Microsoft Sentinel using the Compute Engine API. Refer the [Product overview](https://cloud.google.com/nat/docs/overview) document for more details. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP NAT to Microsoft Sentinel** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. 
+ For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + + Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudNATLogsSetup/GCPCloudNATLogsSetup.tf) + & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + + Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudNATLogsSetup/GCPCloudNATLogsSetup.tf) + & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable NAT logs + In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes. Navigate to Cloud NAT section and click on Add origin to create backends as per link provided below. 
+ + Reference Link: [Link to documentation](https://cloud.google.com/nat/docs/monitoring) +#### 3. Connect new collectors + To enable GCP Cloud NAT Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `GCPNAT` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformresourcemanager.md b/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformresourcemanager.md index 1d3e0237446..7da12f08b88 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformresourcemanager.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformresourcemanager.md @@ -21,6 +21,55 @@ This solution provides **1 data connector(s)**. The Google Cloud Platform Resource Manager data connector provides the capability to ingest Resource Manager [Admin Activity and Data Access Audit logs](https://cloud.google.com/resource-manager/docs/audit-logging) into Microsoft Sentinel using the Cloud Resource Manager API. 
Refer the [Product overview](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy) document for more details. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP Resource Manager to Microsoft Sentinel** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleCloudPlatformResourceManager/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + + Find the Log set up script [**here**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPResourceManagerLogsSetup/GCPResourceManagerLogSetup.tf) + & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. 
+ For more information, refer the [Connector tutorial](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleCloudPlatformResourceManager/Data%20Connectors/README.md) for log setup and authentication setup tutorial. + + Find the Log set up script [**here**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/c1cb589dad1add228f78e629073a9b069ce52991/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPResourceManagerLogsSetup/GCPResourceManagerLogSetup.tf) + & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. Enable Resource Manager logs + In the Google Cloud Console, enable cloud resource manager API if not enabled previously, and save the changes. Make sure to have organization level IAM permissions for your account to see all logs in the resource hierarchy. You can refer the document links for different IAM permissions for access control with IAM at each level provided in this [link](https://cloud.google.com/resource-manager/docs/how-to) +#### 3. Connect new collectors + To enable GCP Resource Manager Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. 
+ +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `GCPResourceManager` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformsql.md b/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformsql.md index b8895aee06d..6f07a01dc7a 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformsql.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/googlecloudplatformsql.md @@ -21,6 +21,52 @@ This solution provides **1 data connector(s)**. The GCP Cloud SQL data connector provides the capability to ingest Audit logs into Microsoft Sentinel using the GCP Cloud SQL API. Refer to [GCP cloud SQL Audit Logs](https://cloud.google.com/sql/docs/mysql/audit-logging) documentation for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect GCP Cloud SQL to Microsoft Sentinel** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. 
+ For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL/Data%20Connectors/Readme.md) for log setup and authentication setup tutorial. + Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudSQLLogsSetup/GCPCloudSQLLogsSetup.tf) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup) + +**Government Cloud:** +#### 1. Setup the GCP environment + Ensure to have the following resources from the GCP Console: + Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection. + For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL/Data%20Connectors/Readme.md) for log setup and authentication setup tutorial. + Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudSQLLogsSetup/GCPCloudSQLLogsSetup.tf) +Authentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov) +- **Tenant ID: A unique identifier that is used as an input in the terraform configuration within a GCP environment.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +#### 2. In the Google Cloud Console, enable Cloud SQL API, if not enabled previously, and save the changes. +#### 3. 
Connect new collectors + To enable GCP Cloud SQL Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect. +**GCP Collector Management** + +📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors. + +➕ **Add New Collector**: Click "Add new collector" to configure a new GCP data connection. + +> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal. + +**GCP Connection Configuration** + +When you click "Add new collector" in the portal, you'll be prompted to provide: +- **Project ID**: Your Google Cloud Platform project ID +- **Service Account**: GCP service account credentials with appropriate permissions +- **Subscription**: The Pub/Sub subscription to monitor for log data + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `GCPCloudSQL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/googleworkspacereports.md b/Tools/Solutions Analyzer/connector-docs/solutions/googleworkspacereports.md index 13f7ed3db23..9c191784c67 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/googleworkspacereports.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/googleworkspacereports.md @@ -29,6 +29,125 @@ The [Google Workspace](https://workspace.google.com/) data connector provides th

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **GooglePickleString** is required for REST API. [See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**STEP 1 - Ensure the prerequisites to obtain the Google Pickle String** + +1. [Python 3 or above](https://www.python.org/downloads/) is installed. +2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available. +3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1). +4. A Google account in that domain with administrator privileges. + +**STEP 2 - Configuration steps for the Google Reports API** + +1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com. +2. Using the search option (available at the top middle), Search for ***APIs & Services*** +3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project. + 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps: + 1. Provide App Name and other mandatory information. + 2. Add authorized domains with API Access Enabled. + 3. 
In Scopes section, add **Admin SDK API** scope. + 4. In Test Users section, make sure the domain admin account is added. + 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID + 1. Click on Create Credentials on the top and select OAuth client ID. + 2. Select Web Application from the Application Type drop down. + 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs. + 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to "**credentials.json**". + 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved. + 1. When popped up for sign-in, use the domain admin account credentials to login. +>**Note:** This script is supported only on Windows operating system. + 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. It will be needed on Function App deployment step. + +**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**6. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Google Workspace data connector using an ARM Template. + +1. 
Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**7. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. 
GWorkspaceXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + GooglePickleString + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +4. (Optional) Change the default delays if required. + + > **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. + Fetch Delay - 10 minutes + Calendar Fetch Delay - 6 hours + Chat Fetch Delay - 1 day + User Accounts Fetch Delay - 3 hours + Login Fetch Delay - 6 hours + +5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +6. Once all application settings have been entered, click **Save**. 
+ | | | |--------------------------|---| | **Tables Ingested** | `GWorkspace_ReportsAPI_access_transparency_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/greynoisethreatintelligence.md b/Tools/Solutions Analyzer/connector-docs/solutions/greynoisethreatintelligence.md index effaa2b5efb..39903ee78b6 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/greynoisethreatintelligence.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/greynoisethreatintelligence.md @@ -22,6 +22,64 @@ This solution provides **1 data connector(s)**. This Data Connector installs an Azure Function app to download GreyNoise indicators once per day and inserts them into the ThreatIntelligenceIndicator table in Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permissions are required. +- **Workspace** (Workspace): read and write permissions on the workspace are required. + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **GreyNoise API Key**: Retrieve your GreyNoise API Key [here](https://viz.greynoise.io/account/api-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. You can connect GreyNoise Threat Intelligence to Microsoft Sentinel by following the below steps:** + +> The following steps create an Azure AAD application, retrieves a GreyNoise API key, and saves the values in an Azure Function App Configuration. + +**1. 
Retrieve your API Key from GreyNoise Visualizer.** + +Generate an API key from GreyNoise Visualizer https://docs.greynoise.io/docs/using-the-greynoise-api + +**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID and Client ID. Also, get the Log Analytics Workspace ID associated with your Microsoft Sentinel instance (it should display below).** + +Follow the instructions here to create your Azure AAD app and save your Client ID and Tenant ID: https://learn.microsoft.com/en-us/azure/sentinel/connect-threat-intelligence-upload-api#instructions + NOTE: Wait until step 5 to generate your client secret. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Assign the AAD application the Microsoft Sentinel Contributor Role.** + +Follow the instructions here to add the Microsoft Sentinel Contributor Role: https://learn.microsoft.com/en-us/azure/sentinel/connect-threat-intelligence-upload-api#assign-a-role-to-the-application + +**4. Specify the AAD permissions to enable MS Graph API access to the upload-indicators API.** + +Follow this section here to add **'ThreatIndicators.ReadWrite.OwnedBy'** permission to the AAD App: https://learn.microsoft.com/en-us/azure/sentinel/connect-threat-intelligence-tip#specify-the-permissions-required-by-the-application. + Back in your AAD App, ensure you grant admin consent for the permissions you just added. + Finally, in the 'Tokens and APIs' section, generate a client secret and save it. You will need it in Step 6. + +**5. Deploy the Threat Intelligence (Preview) Solution, which includes the Threat Intelligence Upload Indicators API (Preview)** + +See Microsoft Sentinel Content Hub for this Solution, and install it in the Microsoft Sentinel instance. + +**6. Deploy the Azure Function** + +Click the Deploy to Azure button. 
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GreyNoise-azuredeploy) + + Fill in the appropriate values for each parameter. **Be aware** that the only valid values for the **GREYNOISE_CLASSIFICATIONS** parameter are **benign**, **malicious** and/or **unknown**, which must be comma-separated. + +**7. Send indicators to Sentinel** + +The function app installed in Step 6 queries the GreyNoise GNQL API once per day, and submits each indicator found in STIX 2.1 format to the [Microsoft Upload Threat Intelligence Indicators API](https://learn.microsoft.com/en-us/azure/sentinel/upload-indicators-api). + Each indicator expires in ~24 hours from creation unless found on the next day's query. In this case the TI Indicator's **Valid Until** time is extended for another 24 hours, which keeps it active in Microsoft Sentinel. + + For more information on the GreyNoise API and the GreyNoise Query Language (GNQL), [click here](https://developer.greynoise.io/docs/using-the-greynoise-api). + | | | |--------------------------|---| | **Tables Ingested** | `ThreatIntelligenceIndicator` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/holmsecurity.md b/Tools/Solutions Analyzer/connector-docs/solutions/holmsecurity.md index f7d5507739f..060dd605ce2 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/holmsecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/holmsecurity.md @@ -21,6 +21,51 @@ This solution provides **1 data connector(s)**. The connector provides the capability to poll data from Holm Security Center into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Holm Security API Token**: Holm Security API Token is required. [Holm Security API Token](https://support.holmsecurity.com/) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a Holm Security Assets to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Holm Security API** + + [Follow these instructions](https://support.holmsecurity.com/knowledge/how-do-i-set-up-an-api-token) to create an API authentication token. + +**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Holm Security connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Holm Security API authorization Token, readily available. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Azure Resource Manager (ARM) Template Deployment** + +**Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Holm Security connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-holmsecurityassets-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, 'and/or Other required fields'. +>Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + | | | |--------------------------|---| | **Tables Ingested** | `net_assets_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/hyas-protect.md b/Tools/Solutions Analyzer/connector-docs/solutions/hyas-protect.md index 529a49a400c..9ad60dcf927 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/hyas-protect.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/hyas-protect.md @@ -21,6 +21,83 @@ This solution provides **1 data connector(s)**. HYAS Protect provide logs based on reputation values - Blocked, Malicious, Permitted, Suspicious. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **HYAS API Key** is required for making API calls. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the HYAS API to pull Logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. 
+ +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**1. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the HYAS Protect data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-HYASProtect-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Function Name**, **Table Name**, **Workspace ID**, **Workspace Key**, **API Key**, **TimeInterval**, **FetchBlockedDomains**, **FetchMaliciousDomains**, **FetchSuspiciousDomains**, **FetchPermittedDomains** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**2. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the HYAS Protect Logs data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> NOTE: You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-HYASProtect-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. 
+If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. HyasProtectLogsXXX). + + e. **Select a runtime:** Choose Python 3.8. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + APIKey + Polling + WorkspaceID + WorkspaceKey +4. Once all application settings have been entered, click **Save**. 
+ | | | |--------------------------|---| | **Tables Ingested** | `HYASProtectDnsSecurityLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/iboss.md b/Tools/Solutions Analyzer/connector-docs/solutions/iboss.md index a971d6b83b6..a99e250a10d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/iboss.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/iboss.md @@ -25,6 +25,65 @@ This solution provides **2 data connector(s)**. The [iboss](https://www.iboss.com) data connector enables you to seamlessly connect your Threat Console to Microsoft Sentinel and enrich your instance with iboss URL event logs. Our logs are forwarded in Common Event Format (CEF) over Syslog and the configuration required can be completed on the iboss platform without the use of a proxy. Take advantage of our connector to garner critical data points and gain insight into security threats. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure AMA Data Connector** + +Steps to configure the iboss AMA Data Connector +**Kindly follow the steps to configure the data connector** + +**Step A. Gather Required Configuration Details in Azure Arc** + + 1. Navigate to Azure Arc ---> Azure Arc Resources ---> Machines. + +2. Add a machine ---> Add a single server ---> Generate script. + +3. 
Select the resource group, this should be the same group as the Log Analytics Workspace for your Microsoft Sentinel instance you will be using + +4. Select a region and ensure it is in the same region as your Log Analytics Workspace + +5. Select Linux as Operating System + +6. Click Next + +7. Download the script and use this information for the next step when configuring your Microsoft Sentinel AMA integration iboss side. + +8. Navigate to the Log Analytics Workspace of your Microsoft Sentinel instance and find its resource group, workspace name, and workspace id + + **Step B. Forward Common Event Format (CEF) logs** + + Set your Threat Console to send Syslog messages in CEF format to your Azure workspace. (Ensure you have the information gathered from the previous section) + +>1. Navigate to the Integrations Marketplace inside your iboss Console + +>2. Select Microsoft Sentinel AMA Log Forwarding + +>3. Select Add Integration + +4. Use the information from the script and your log analytics workspace to configure the integration. + +5. Add the integration + +>6. An email will be sent to your iboss alerts email to authenticate. Please do so within five minutes + +7. After authenticating, wait 15 to 20 minutes and ensure the Microsoft Sentinel Status of your integration is successful. + + **Step C. Validate connection** + + 1. Follow the instructions to validate your connectivity: + +2. Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +3. It may take about 20 minutes until the connection streams data to your workspace. 
+ | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/illumio-core.md b/Tools/Solutions Analyzer/connector-docs/solutions/illumio-core.md index 276b718b029..184168dafa1 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/illumio-core.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/illumio-core.md @@ -25,6 +25,81 @@ This solution provides **2 data connector(s)**. The [Illumio Core](https://www.illumio.com/products/) data connector provides the capability to ingest Illumio Core logs into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias IllumioCoreEvent and load the function code or click [here](https://aka.ms/sentinel-IllumioCore-parser).The function usually takes 10-15 minutes to activate after solution installation/update and maps Illumio Core events to Microsoft Sentinel Information Model (ASIM). +**1. Kindly follow the steps to configure the data connector** + +**Step A. 
Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine. + + **Step B. Configure Illumio Core to send logs using CEF** + + Configure Event Format + + 1. From the PCE web console menu, choose **Settings > Event Settings** to view your current settings. + + 2. Click **Edit** to change the settings. + + 3. Set **Event Format** to CEF. + + 4. (Optional) Configure **Event Severity** and **Retention Period**. + +Configure event forwarding to an external syslog server + + 1. From the PCE web console menu, choose **Settings > Event Settings**. + + 2. Click **Add**. + + 3. Click **Add Repository**. + + 4. Complete the **Add Repository** dialog. + + 5. Click **OK** to save the event forwarding configuration. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. 
You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/illumio-insight.md b/Tools/Solutions Analyzer/connector-docs/solutions/illumio-insight.md index 32255626e25..b582c36e9ab 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/illumio-insight.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/illumio-insight.md @@ -25,6 +25,46 @@ This solution provides **2 data connector(s)**. The Illumio Insights Summary connector Publishes AI-powered threat discovery and anomaly reports generated by the Illumio Insights Agent. Leveraging the MITRE ATT&CK framework, these reports surface high-fidelity insights into emerging threats and risky behaviors, directly into the Data Lake. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configuration** + +Configure the Illumio Insights Summary connector. 
+> This data connector may take 24 hrs for the latest report after onboarding +#### Configuration steps for the Illumio Insights Summary Connector + +**Prerequisites** +- Register and Login to Illumio Console with valid credentials +- Purchase Illumio Insights or Start a free Trial for Illumio Insights +- Enable The Illumio Insights Agent + +**Step 1: Register the Service Account** +1. Go to **Illumio Console → Access → Service Accounts** +2. Create a service account for the tenant +3. Once you create a service account, you will receive the client credentials +4. Copy the **auth_username** (Illumio Insights API Key) and the **Secret** (API Secret) + +**Step 2: Add Client Credentials to Sentinel Account** +- Add the API key and secret to Sentinel Account for tenant authentication +- These credentials will be used to authenticate calls to the Illumio SaaS API + +Please fill in the required fields below with the credentials obtained from the Illumio Console: +- **Illumio Insights Api Key**: (password field) +- **Api Secret**: (password field) +- **Illumio Tenant ID**: {IllumioTenantId - Optional} + +**2. Connect** + +Enable the Illumio Insights Summary connector. +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `IllumioInsightsSummary_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/illumiosaas.md b/Tools/Solutions Analyzer/connector-docs/solutions/illumiosaas.md index 6edeb2c86ad..0f0cec1d943 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/illumiosaas.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/illumiosaas.md @@ -25,6 +25,51 @@ This solution provides **2 data connector(s)**. The Illumio Saas Cloud data connector provides the capability to ingest Flow logs into Microsoft Sentinel using the Illumio Saas Log Integration through AWS S3 Bucket. 
Refer to [Illumio Saas Log Integration](https://product-docs-repo.illumio.com/Tech-Docs/CloudSecure/out/en/administer-cloudsecure/connector.html#UUID-c14edaab-9726-1f23-9c4c-bc2937be39ee_section-idm234556433515698) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Illumio Saas to Microsoft Sentinel** +>**NOTE:** This connector fetches the Illumio Saas Flow logs from AWS S3 bucket +To gather data from Illumio, you need to configure the following resources +#### 1. AWS Role ARN + To gather data from Illumio, you'll need AWS Role ARN. +#### 2. AWS SQS Queue URL + To gather data from Illumio, you'll need AWS SQS Queue URL. + + +For detailed steps to retrieve the AWS Role ARN, SQS Queue URL, and configure Illumio log forwarding to the Amazon S3 bucket, refer to the [Connector Setup Guide](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaasLogs_ccf/Readme.md). +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **AWS Role ARN** +- **AWS SQS Queue URL** +- **Table Name** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. 
+ +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add Account** + +*Add Account* + +When you click the "Add Account" button in the portal, a configuration form will open. You'll need to provide: + +- **Role ARN** (optional): Enter Role ARN +- **Flow Log Queue URL** (optional): Enter Flow log SQL Queue URL + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `IllumioFlowEventsV2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/illusive-platform.md b/Tools/Solutions Analyzer/connector-docs/solutions/illusive-platform.md index 97578aa8102..ac5ce69d358 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/illusive-platform.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/illusive-platform.md @@ -25,6 +25,69 @@ This solution provides **2 data connector(s)**. The Illusive Platform Connector allows you to share Illusive's attack surface analysis data and incident logs with Microsoft Sentinel and view this information in dedicated dashboards that offer insight into your organization's attack surface risk (ASM Dashboard) and track unauthorized lateral movement in your organization's network (ADS Dashboard). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Illusive Common Event Format (CEF) logs to Syslog agent** + + 1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. +> 2. Log onto the Illusive Console, and navigate to Settings->Reporting. +> 3. Find Syslog Servers +> 4. Supply the following information: +>> 1. Host name: Linux Syslog agent IP address or FQDN host name +>> 2. Port: 514 +>> 3. Protocol: TCP +>> 4. Audit messages: Send audit messages to server +> 5. To add the syslog server, click Add. +> 6. 
For more information about how to add a new syslog server in the Illusive platform, please find the Illusive Networks Admin Guide in here: https://support.illusivenetworks.com/hc/en-us/sections/360002292119-Documentation-by-Version + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/imperva-waf-gateway.md b/Tools/Solutions Analyzer/connector-docs/solutions/imperva-waf-gateway.md index fd4330d53f8..54d67377bc5 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/imperva-waf-gateway.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/imperva-waf-gateway.md @@ -21,6 +21,66 @@ This solution provides **1 data connector(s)**. The [Imperva](https://www.imperva.com) connector will allow you to quickly connect your Imperva WAF Gateway alerts to Azure Sentinel. This provides you additional insight into your organization's WAF traffic and improves your security operation capabilities. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Azure Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Azure Sentinel will use as the proxy between your security solution and Azure Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Azure Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Set your security solution to send Syslog messages in CEF format to the proxy machine. 
Make sure to send the logs to port 514 TCP on the machine's IP address. + +**3. SecureSphere MX Configuration** + +This connector requires an Action Interface and Action Set to be created on the Imperva SecureSphere MX. [Follow the steps](https://community.imperva.com/blogs/craig-burlingame1/2020/11/13/steps-for-enabling-imperva-waf-gateway-alert) to create the requirements. +**3.1 Create the Action Interface** + + Create a new Action Interface that contains the required parameters to send WAF alerts to Azure Sentinel. + + **3.2 Create the Action Set** + + Create a new Action Set that uses the Action Interface configured. + + **3.3 Apply the Action Set** + + Apply the Action Set to any Security Policies you wish to have alerts for sent to Azure Sentinel. +**4. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. +**4.1 Check for logs in the past 5 minutes using the following command. + +CommonSecurityLog | where DeviceVendor == "Imperva Inc." | where DeviceProduct == "WAF Gateway" | where TimeGenerated >= ago(5m)** +**5. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/impervacloudwaf.md b/Tools/Solutions Analyzer/connector-docs/solutions/impervacloudwaf.md index b40f168646a..a7955d5bbf8 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/impervacloudwaf.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/impervacloudwaf.md @@ -25,6 +25,96 @@ This solution provides **2 data connector(s)**. 
The [Imperva Cloud WAF](https://www.imperva.com/resources/resource-library/datasheets/imperva-cloud-waf/) data connector provides the capability to integrate and ingest Web Application Firewall events into Microsoft Sentinel through the REST API. Refer to Log integration [documentation](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Download) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **ImpervaAPIID**, **ImpervaAPIKey**, **ImpervaLogServerURI** are required for the API. [See the documentation to learn more about Setup Log Integration process](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration). Check all [requirements and follow the instructions](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration) for obtaining credentials. Please note that this connector uses CEF log event format. [More information](https://docs.imperva.com/bundle/cloud-application-security/page/more/log-file-structure.htm#Logfilestructure) about log format. 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Imperva Cloud API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App. + +>**NOTE:**This data connector depends on a parser based on a Kusto Function to work as expected [**ImpervaWAFCloud**](https://aka.ms/sentinel-impervawafcloud-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuration steps for the Log Integration** + + [Follow the instructions](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration) to obtain the credentials. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions** + +>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. 
Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Imperva Cloud WAF data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-impervawafcloud-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **ImpervaAPIID**, **ImpervaAPIKey**, **ImpervaLogServerURI** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Imperva Cloud WAF data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure functions development. + +1. Download the [Azure Functions App](https://aka.ms/sentinel-impervawafcloud-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. 
**Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ImpervaCloudXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select ** New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + ImpervaAPIID + ImpervaAPIKey + ImpervaLogServerURI + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +3. Once all application settings have been entered, click **Save**. 
+ | | | |--------------------------|---| | **Tables Ingested** | `ImpervaWAFCloud_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/infoblox-cloud-data-connector.md b/Tools/Solutions Analyzer/connector-docs/solutions/infoblox-cloud-data-connector.md index 65082adcb9b..9f8ac01f48e 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/infoblox-cloud-data-connector.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/infoblox-cloud-data-connector.md @@ -25,6 +25,92 @@ This solution provides **2 data connector(s)**. The Infoblox Cloud Data Connector allows you to easily connect your Infoblox BloxOne data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**IMPORTANT:** This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC**](https://aka.ms/sentinel-InfobloxCloudDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +>**IMPORTANT:** This Microsoft Sentinel data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). 
As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements. +**1. Follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note: CEF logs are collected only from Linux Agents_ + +1. Navigate to your **Microsoft Sentinel workspace > Data connectors** blade. + +2. Search for the **Common Event Format (CEF) via AMA** data connector and open it. + +3. Ensure there is no existing DCR configured to collect required facility of logs as it may cause log duplication. Create a new **DCR (Data Collection Rule)**. + + _Note: It is recommended to install the AMA agent v1.27 at minimum. [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication._ + +4. Run the command provided in the **CEF via AMA data connector** page to configure the CEF collector on the machine. + + **Step B. Configure Infoblox BloxOne to send Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent** + + Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent. +1. Navigate to **Manage > Data Connector**. +2. Click the **Destination Configuration** tab at the top. +3. Click **Create > Syslog**. + - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**. + - **Description**: Optionally give it a meaningful **description**. + - **State**: Set the state to **Enabled**. + - **Format**: Set the format to **CEF**. 
+ - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed. + - **Port**: Leave the port number at **514**. + - **Protocol**: Select desired protocol and CA certificate if applicable. + - Click **Save & Close**. +4. Click the **Traffic Flow Configuration** tab at the top. +5. Click **Create**. + - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**. + - **Description**: Optionally give it a meaningful **description**. + - **State**: Set the state to **Enabled**. + - Expand the **Service Instance** section. + - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. + - Expand the **Source Configuration** section. + - **Source**: Select **BloxOne Cloud Source**. + - Select all desired **log types** you wish to collect. Currently supported log types are: + - Threat Defense Query/Response Log + - Threat Defense Threat Feeds Hits Log + - DDI Query/Response Log + - DDI DHCP Lease Log + - Expand the **Destination Configuration** section. + - Select the **Destination** you just created. + - Click **Save & Close**. +6. Allow the configuration some time to activate. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. 
You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/infoblox-nios.md b/Tools/Solutions Analyzer/connector-docs/solutions/infoblox-nios.md index 68dc2d378b1..20877ff2099 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/infoblox-nios.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/infoblox-nios.md @@ -21,6 +21,53 @@ This solution provides **1 data connector(s)**. The [Infoblox Network Identity Operating System (NIOS)](https://www.infoblox.com/glossary/network-identity-operating-system-nios/) connector allows you to easily connect your Infoblox NIOS logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **Infoblox NIOS**: must be configured to export logs via Syslog + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Infoblox and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20NIOS/Parser/Infoblox.yaml), on the second line of the query, enter any unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the Infoblox NIOS** + +[Follow these instructions](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-slog-and-snmp-configuration-for-nios.pdf) to enable syslog forwarding of Infoblox NIOS Logs. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + +**4. Configure the Sentinel parser** + +Update the watchlist 'Sources_by_Source' with the hostname(s) of your Infoblox device(s). 
Set SourceType to 'InfobloxNIOS' and Source to the value of 'Computer' seen in the logs seen in Syslog table. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/infoblox-soc-insights.md b/Tools/Solutions Analyzer/connector-docs/solutions/infoblox-soc-insights.md index 750e57cd04e..b9e6c194f56 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/infoblox-soc-insights.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/infoblox-soc-insights.md @@ -41,6 +41,104 @@ This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytic Using MMA and AMA on the same machine can cause log duplication and extra ingestion cost. [More details](https://learn.microsoft.com/en-us/azure/sentinel/ama-migrate). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Workspace Keys** + +In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Workspace Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. 
Parsers** + +>This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution. + +**3. SOC Insights** + +>This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights). + +**4. Infoblox Cloud Data Connector** + +>This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. 
+ +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent** + +Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent. +1. Navigate to **Manage > Data Connector**. +2. Click the **Destination Configuration** tab at the top. +3. Click **Create > Syslog**. + - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**. + - **Description**: Optionally give it a meaningful **description**. + - **State**: Set the state to **Enabled**. + - **Format**: Set the format to **CEF**. + - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed. + - **Port**: Leave the port number at **514**. + - **Protocol**: Select desired protocol and CA certificate if applicable. + - Click **Save & Close**. +4. Click the **Traffic Flow Configuration** tab at the top. +5. Click **Create**. + - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**. + - **Description**: Optionally give it a meaningful **description**. + - **State**: Set the state to **Enabled**. + - Expand the **Service Instance** section. + - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. + - Expand the **Source Configuration** section. + - **Source**: Select **BloxOne Cloud Source**. + - Select the **Internal Notifications** Log Type. 
+ - Expand the **Destination Configuration** section. + - Select the **Destination** you just created. + - Click **Save & Close**. +6. Allow the configuration some time to activate. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/infoblox.md b/Tools/Solutions Analyzer/connector-docs/solutions/infoblox.md index 9523f6d3efd..e5076e6051f 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/infoblox.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/infoblox.md @@ -14,11 +14,7 @@ ## Data Connectors -This solution provides **5 data connector(s)**. - -### [[Recommended] Infoblox Cloud Data Connector via AMA](../connectors/infobloxclouddataconnectorama.md) - -**Publisher:** Infoblox +This solution provides **4 data connector(s)**. 
### [Infoblox Data Connector via REST API](../connectors/infobloxdataconnector.md) @@ -50,6 +46,104 @@ This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytic Using MMA and AMA on the same machine can cause log duplication and extra ingestion cost. [More details](https://learn.microsoft.com/en-us/azure/sentinel/ama-migrate). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Workspace Keys** + +In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Workspace Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. Parsers** + +>This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution. + +**3. SOC Insights** + +>This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. 
You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights). + +**4. Infoblox Cloud Data Connector** + +>This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. 
Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent** + +Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent. +1. Navigate to **Manage > Data Connector**. +2. Click the **Destination Configuration** tab at the top. +3. Click **Create > Syslog**. + - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**. + - **Description**: Optionally give it a meaningful **description**. + - **State**: Set the state to **Enabled**. + - **Format**: Set the format to **CEF**. + - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed. + - **Port**: Leave the port number at **514**. + - **Protocol**: Select desired protocol and CA certificate if applicable. + - Click **Save & Close**. +4. Click the **Traffic Flow Configuration** tab at the top. +5. Click **Create**. + - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**. + - **Description**: Optionally give it a meaningful **description**. + - **State**: Set the state to **Enabled**. + - Expand the **Service Instance** section. + - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. + - Expand the **Source Configuration** section. + - **Source**: Select **BloxOne Cloud Source**. + - Select the **Internal Notifications** Log Type. + - Expand the **Destination Configuration** section. + - Select the **Destination** you just created. + - Click **Save & Close**. +6. Allow the configuration some time to activate. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. 
+ +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | @@ -63,7 +157,7 @@ This solution ingests data into **20 table(s)**: | Table | Used By Connectors | |-------|-------------------| -| `CommonSecurityLog` | [[Deprecated] Infoblox SOC Insight Data Connector via Legacy Agent](../connectors/infobloxsocinsightsdataconnector-legacy.md), [[Recommended] Infoblox Cloud Data Connector via AMA](../connectors/infobloxclouddataconnectorama.md), [[Recommended] Infoblox SOC Insight Data Connector via AMA](../connectors/infobloxsocinsightsdataconnector-ama.md) | +| `CommonSecurityLog` | [[Deprecated] Infoblox SOC Insight Data Connector via Legacy Agent](../connectors/infobloxsocinsightsdataconnector-legacy.md), [[Recommended] Infoblox SOC Insight Data Connector via AMA](../connectors/infobloxsocinsightsdataconnector-ama.md) | | `Failed_Range_To_Ingest_CL` | [Infoblox Data Connector via REST API](../connectors/infobloxdataconnector.md) | | `InfobloxInsight_CL` | [Infoblox SOC Insight Data Connector via REST API](../connectors/infobloxsocinsightsdataconnector-api.md) | | `Infoblox_Failed_Indicators_CL` | [Infoblox Data Connector via REST API](../connectors/infobloxdataconnector.md) | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/integration-for-atlassian-beacon.md b/Tools/Solutions 
Analyzer/connector-docs/solutions/integration-for-atlassian-beacon.md index 3b7f45d3f73..93115b8d1d3 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/integration-for-atlassian-beacon.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/integration-for-atlassian-beacon.md @@ -21,6 +21,54 @@ This solution provides **1 data connector(s)**. Atlassian Beacon is a cloud product that is built for Intelligent threat detection across the Atlassian platforms (Jira, Confluence, and Atlassian Admin). This can help users detect, investigate and respond to risky user activity for the Atlassian suite of products. The solution is a custom data connector from DEFEND Ltd. that is used to visualize the alerts ingested from Atlassian Beacon to Microsoft Sentinel via a Logic App. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Microsoft Sentinel** + +>1. Navigate to the newly installed Logic App 'Atlassian Beacon Integration' + +>2. Navigate to 'Logic app designer' + +>3. Expand the 'When a HTTP request is received' + +>4. Copy the 'HTTP POST URL' + +**2. Atlassian Beacon** + +>1. Login to Atlassian Beacon using an admin account + +>2. Navigate to 'SIEM forwarding' under SETTINGS + +> 3. Paste the copied URL from Logic App in the text box + +> 4. Click the 'Save' button + +**3. Testing and Validation** + +>1. Login to Atlassian Beacon using an admin account + +>2. 
Navigate to 'SIEM forwarding' under SETTINGS + +> 3. Click the 'Test' button right next to the newly configured webhook + +> 4. Navigate to Microsoft Sentinel + +> 5. Navigate to the newly installed Logic App + +> 6. Check for the Logic App Run under 'Runs history' + +> 7. Check for logs under the table name 'atlassian_beacon_alerts_CL' in 'Logs' + +> 8. If the analytic rule has been enabled, the above Test alert should have created an incident in Microsoft Sentinel + | | | |--------------------------|---| | **Tables Ingested** | `atlassian_beacon_alerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ionix.md b/Tools/Solutions Analyzer/connector-docs/solutions/ionix.md index 908b96d0493..c0f0385d0eb 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ionix.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ionix.md @@ -21,6 +21,25 @@ This solution provides **1 data connector(s)**. The IONIX Security Logs data connector, ingests logs from the IONIX system directly into Sentinel. The connector allows users to visualize their data, create alerts and incidents and improve security investigations. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **IONIX Subscription**: a subscription and account is required for IONIX logs. [One can be acquired here.](https://azuremarketplace.microsoft.com/en/marketplace/apps/cyberpion1597832716616.cyberpion) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +Follow the [instructions](https://www.ionix.io/integrations/azure-sentinel/) to integrate IONIX Security Alerts into Sentinel. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `CyberpionActionItems_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ipinfo.md b/Tools/Solutions Analyzer/connector-docs/solutions/ipinfo.md index 8f5a2c9cba0..9837bd8af6c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ipinfo.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ipinfo.md @@ -85,6 +85,73 @@ This solution provides **17 data connector(s)**. This IPinfo data connector installs an Azure Function app to download WHOIS_POC datasets and insert it into custom log table in Microsoft Sentinel +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **IPinfo API Token**: Retrieve your IPinfo API Token [here](https://ipinfo.io/). 
+
+**Setup Instructions:**
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+**1. Retrieve API Token**
+
+Retrieve your IPinfo API Token [here](https://ipinfo.io/).
+
+**2. In your Azure AD tenant, create an Azure Active Directory (AAD) application**
+
+In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.
+
+**3. Assign the AAD application the Microsoft Sentinel Contributor Role.**
+
+Assign the AAD application you just created to the Contributor (Privileged administrator roles) and Monitoring Metrics Publisher (Job function roles) in the same “Resource Group” you use for “Log Analytic Workspace” on which “Microsoft Sentinel” is added: Use this Link.
+
+**4. Get Workspace Resource ID**
+
+Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'
+
+**5. Deploy the Azure Function**
+
+Use this for automated deployment of the IPinfo data connector using an ARM Template.
+
+1. Click the **Deploy to Azure** button below.
+
+ [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-POC-azuredeploy)
+2. Select the preferred **Subscription**, **Resource Group** and **Location**.
+3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.
+
+**1. Manual Deployment of Azure Functions**
+
+Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code).
+**Step 1 - Deploy a Function App**
+
+ 1. 
Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-POC-functionapp). +2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. +3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. +4. After successful deployment of the function app, follow the next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + RESOURCE_ID + IPINFO_TOKEN + TENANT_ID + CLIENT_ID + CLIENT_SECRET + RETENTION_IN_DAYS + TOTAL_RETENTION_IN_DAYS + SCHEDULE + LOCATION +5. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `Ipinfo_WHOIS_POC_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/isc-bind.md b/Tools/Solutions Analyzer/connector-docs/solutions/isc-bind.md index ded13c59b1b..43f2a0f5d80 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/isc-bind.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/isc-bind.md @@ -21,6 +21,51 @@ This solution provides **1 data connector(s)**. The [ISC Bind](https://www.isc.org/bind/) connector allows you to easily connect your ISC Bind logs with Microsoft Sentinel. This gives you more insight into your organization's network traffic data, DNS query data, traffic statistics and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. 
+ +**Custom Permissions:** +- **ISC Bind**: must be configured to export logs via Syslog + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ISCBind and load the function code or click [here](https://aka.ms/sentinel-iscbind-parser).The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the ISC Bind** + +1. Follow these instructions to configure the ISC Bind to forward syslog: + - [DNS Logs](https://kb.isc.org/docs/aa-01526) +2. Configure Syslog to send the Syslog traffic to Agent. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/island.md b/Tools/Solutions Analyzer/connector-docs/solutions/island.md index 1864a43ef1a..72f8fdee53f 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/island.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/island.md @@ -26,6 +26,24 @@ This solution provides **2 data connector(s)**. The [Island](https://www.island.io) connector provides the capability to ingest Island User Activity logs into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Island API Key**: An Island API key is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Island to Microsoft Sentinel** + +Provide the Island API URL and Key. API URL is https://management.island.io/api/external/v1/timeline for US or https://eu.management.island.io/api/external/v1/timeline for EU. + Generate the API Key in the Management Console under Settings > API. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. 
+ | | | |--------------------------|---| | **Tables Ingested** | `Island_User_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ivanti-unified-endpoint-management.md b/Tools/Solutions Analyzer/connector-docs/solutions/ivanti-unified-endpoint-management.md index 7b3f20da9bf..1ad9bc9f247 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ivanti-unified-endpoint-management.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ivanti-unified-endpoint-management.md @@ -21,6 +21,52 @@ This solution provides **1 data connector(s)**. The [Ivanti Unified Endpoint Management](https://www.ivanti.com/products/unified-endpoint-manager) data connector provides the capability to ingest [Ivanti UEM Alerts](https://help.ivanti.com/ld/help/en_US/LDMS/11.0/Windows/alert-c-monitoring-overview.htm) into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**IvantiUEMEvent**](https://aka.ms/sentinel-ivantiuem-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using Ivanti Unified Endpoint Management Release 2021.1 Version 11.0.3.374 + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server where the Ivanti Unified Endpoint Management Alerts are forwarded. + +> Logs from Ivanti Unified Endpoint Management Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. 
+**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure Ivanti Unified Endpoint Management alert forwarding.** + +[Follow the instructions](https://help.ivanti.com/ld/help/en_US/LDMS/11.0/Windows/alert-t-define-action.htm) to set up Alert Actions to send logs to syslog server. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/jamf-protect.md b/Tools/Solutions Analyzer/connector-docs/solutions/jamf-protect.md index 9d5859069da..19dd165c41b 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/jamf-protect.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/jamf-protect.md @@ -22,6 +22,44 @@ This solution provides **1 data connector(s)**. The [Jamf Protect](https://www.jamf.com/products/jamf-protect/) connector provides the capability to read raw event data from Jamf Protect in Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher. 
+- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role.
+
+**Setup Instructions:**
+
+> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.
+
+**1. Create ARM Resources and Provide the Required Permissions**
+
+This connector reads data from the tables that Jamf Protect uses in a Microsoft Analytics Workspace. If the [data forwarding](https://docs.jamf.com/jamf-protect/documentation/Data_Forwarding_to_a_Third_Party_Storage_Solution.html?hl=sentinel#task-4227) option is enabled in Jamf Protect, then raw event data is sent to the Microsoft Sentinel Ingestion API.
+#### Automated Configuration and Secure Data Ingestion with Entra Application
+Clicking on "Deploy" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR).
+It will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.
+Deploy Jamf Protect connector resources
+
+**2. Push your logs into the workspace**
+
+Use the following parameters to configure your machine to send the logs to the workspace.
+- **Tenant ID (Directory ID)**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Endpoint Uri**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Rule Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Unified Logs Stream Name**: `Custom-jamfprotectunifiedlogs` +- **Telemetry Stream Name**: `Custom-jamfprotecttelemetryv2` +- **Alerts Stream Name**: `Custom-jamfprotectalerts` + | | | |--------------------------|---| | **Tables Ingested** | `jamfprotectalerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/jboss.md b/Tools/Solutions Analyzer/connector-docs/solutions/jboss.md index a48f2adf972..3d58b1e98b8 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/jboss.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/jboss.md @@ -21,6 +21,74 @@ This solution provides **1 data connector(s)**. The JBoss Enterprise Application Platform data connector provides the capability to ingest [JBoss](https://www.redhat.com/en/technologies/jboss-middleware/application-platform) events into Microsoft Sentinel. Refer to [Red Hat documentation](https://access.redhat.com/documentation/en-us/red_hat_jboss_enterprise_application_platform/7.0/html/configuration_guide/logging_with_jboss_eap) for more information. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**JBossEvent**](https://aka.ms/sentinel-jbosseap-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using JBoss Enterprise Application Platform 7.4.0. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the JBoss server where the logs are generated. + +> Logs from JBoss Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. 
+ - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. Click **+Add custom** +3. Click **Browse** to upload a sample of a JBoss log file (e.g. server.log). Then, click **Next >** +4. Select **Timestamp** as the record delimiter and select Timestamp format **YYYY-MM-DD HH:MM:SS** from the dropdown list then click **Next >** +5. Select **Windows** or **Linux** and enter the path to JBoss logs based on your configuration. Example: + - **Linux** Directory: + +>Standalone server: EAP_HOME/standalone/log/server.log + +>Managed domain: EAP_HOME/domain/servers/SERVER_NAME/log/server.log + +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **JBossLogs** as the custom log Name and click **Done** + +**3. Check logs in Microsoft Sentinel** + +Open Log Analytics to check if the logs are received using the JBossLogs_CL Custom log table. + +>**NOTE:** It may take up to 30 minutes before new logs will appear in JBossLogs_CL table. + | | | |--------------------------|---| | **Tables Ingested** | `JBossLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/juniper-srx.md b/Tools/Solutions Analyzer/connector-docs/solutions/juniper-srx.md index f0345aafc9b..4888e66c926 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/juniper-srx.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/juniper-srx.md @@ -21,6 +21,52 @@ This solution provides **1 data connector(s)**. The [Juniper SRX](https://www.juniper.net/us/en/products-services/security/srx-series/) connector allows you to easily connect your Juniper SRX logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **Juniper SRX**: must be configured to export logs via Syslog + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias JuniperSRX and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Juniper%20SRX/Parsers/JuniperSRX.txt), on the second line of the query, enter the hostname(s) of your JuniperSRX device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. 
Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the Juniper SRX** + +1. Follow these instructions to configure the Juniper SRX to forward syslog: + - [Traffic Logs (Security Policy Logs)](https://kb.juniper.net/InfoCenter/index?page=content&id=KB16509&actp=METADATA) + - [System Logs](https://kb.juniper.net/InfoCenter/index?page=content&id=kb16502) +2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/juniperidp.md b/Tools/Solutions Analyzer/connector-docs/solutions/juniperidp.md index 704f59c65e2..cb50f83beef 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/juniperidp.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/juniperidp.md @@ -21,6 +21,64 @@ This solution provides **1 data connector(s)**. The [Juniper](https://www.juniper.net/) IDP data connector provides the capability to ingest [Juniper IDP](https://www.juniper.net/documentation/us/en/software/junos/idp-policy/topics/topic-map/security-idp-overview.html) events into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This data connector depends on a parser based on Kusto Function to work as expected [**JuniperIDP**](https://aka.ms/sentinel-JuniperIDP-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** IDP OS 5.1 and above is supported by this data connector. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Follow the configuration steps below to get Juniper IDP logs into Microsoft Sentinel. This configuration enriches events generated by Juniper IDP module to provide visibility on log source information for Juniper IDP logs. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps. +1. Download config file [juniper_idp.conf](https://aka.ms/sentinel-JuniperIDP-conf). +2. Login to the server where you have installed Azure Log Analytics agent. +3. Copy juniper_idp.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. +4. Edit juniper_idp.conf as follows: + + i. 
change the listen port for receiving logs based on your configuration (line 3) + + ii. replace **workspace_id** with real value of your Workspace ID (lines 58,59,60,63) +5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command: + sudo /opt/microsoft/omsagent/bin/service_control restart +6. To configure a remote syslog destination, please reference the [SRX Getting Started - Configure System Logging](https://kb.juniper.net/InfoCenter/index?page=content&id=kb16502). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `JuniperIDP_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/keeper-security.md b/Tools/Solutions Analyzer/connector-docs/solutions/keeper-security.md index b3cf61064cb..d7014ae410e 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/keeper-security.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/keeper-security.md @@ -22,6 +22,65 @@ This solution provides **1 data connector(s)**. The [Keeper Security](https://keepersecurity.com) connector provides the capability to read raw event data from Keeper Security in Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher. +- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Create ARM Resources and Provide the Required Permissions** + +This connector reads data from the tables that Keeper Security uses in a Microsoft Analytics Workspace, if the [data forwarding](https://docs.keepersecurity.com/docs/data-forwarding) option is enabled in Keeper Security then raw event data is sent to the Microsoft Sentinel Ingestion API. +#### Automated Configuration and Secure Data Ingestion with Entra Application +Clicking on "Deploy" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). +It will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token. +Keeper Security connector resources + +**2. Push your logs into the workspace** + +Use the following parameters to configure your machine to send the logs to the workspace. +- **Tenant ID (Directory ID)**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Endpoint Uri**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Rule Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Events Logs Stream Name**: `Custom-KeeperSecurityEventNewLogs` + +**3. 
Update Keeper Admin Console** + +Configure the Keeper Admin Console with the Azure connection details to enable data forwarding to Microsoft Sentinel. +#### Configure Azure Monitor Logs in Keeper Admin Console + +In the [Keeper Admin Console](https://keepersecurity.com/console/), log in as the Keeper Administrator. Then go to **Reporting & Alerts** and select **Azure Monitor Logs**. + +Provide the following information from Step 2 above into the Admin Console: + +- **Azure Tenant ID**: You can find this from Azure's "Subscriptions" area. +- **Application (client) ID**: This is located in the App registration (KeeperLogging) overview screen +- **Client Secret Value**: This is the Client Secret Value from the app registration secrets. +- **Endpoint URL**: This is a URL that is created in the following specific format: + `https://<Data Collection Endpoint Uri>/dataCollectionRules/<Data Collection Rule Immutable ID>/streams/<Events Logs Stream Name>
?api-version=2023-01-01` + +To assemble the Endpoint URL: + +- **Data Collection Endpoint Uri** This comes from Step 2 above +- **Data Collection Rule Immutable ID** From the Data Collector Rule, copy the "Immutable Id" value, e.g. `dcr-xxxxxxx` +- **Events Logs Stream Name
** This is the table name created by Azure, e.g. `Custom-KeeperSecurityEventNewLogs` + +Example: `https://<Data Collection Endpoint Uri>/dataCollectionRules/<Data Collection Rule Immutable ID>/streams/Custom-KeeperSecurityEventNewLogs?api-version=2023-01-01` + | | | |--------------------------|---| | **Tables Ingested** | `KeeperSecurityEventNewLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/lastpass.md b/Tools/Solutions Analyzer/connector-docs/solutions/lastpass.md index 3c319f619a5..6161c9e8248 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/lastpass.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/lastpass.md @@ -22,6 +22,23 @@ This solution provides **1 data connector(s)**. The [LastPass Enterprise](https://www.lastpass.com/products/enterprise-password-management-and-sso) connector provides the capability to LastPass reporting (audit) logs into Microsoft Sentinel. The connector provides visibility into logins and activity within LastPass (such as reading and removing passwords). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **LastPass API Key and CID**: A LastPass API key and CID are required. [See the documentation to learn more about LastPass API](https://support.logmeininc.com/lastpass/help/use-the-lastpass-provisioning-api-lp010068). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect LastPass Enterprise to Microsoft Sentinel** + +Provide the LastPass Provisioning API Key. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. 
+ | | | |--------------------------|---| | **Tables Ingested** | `LastPassNativePoller_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/lookout-cloud-security-platform-for-microsoft-sentinel.md b/Tools/Solutions Analyzer/connector-docs/solutions/lookout-cloud-security-platform-for-microsoft-sentinel.md index c09fb3df3a3..a378269b365 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/lookout-cloud-security-platform-for-microsoft-sentinel.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/lookout-cloud-security-platform-for-microsoft-sentinel.md @@ -21,6 +21,120 @@ This solution provides **1 data connector(s)**. This connector uses a Agari REST API connection to push data into Microsoft Sentinel Log Analytics. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Agari REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. 
+ +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**Step-by-Step Instructions** + + As a prerequisite to this integration, first, you need to configure an API client on Lookout's Management Console. From the Management Console, you can add one or more clients and configure the appropriate permissions and actions for each. + + 1. Name - The name given to this client. + + 2. Client ID - the unique ID that was provided for this client. + + 3. Permissions - The permissions enabled for this client. The permissions you check are those that the client will be allowed to access. The listed options are Activity, Violation, Anomaly, Insights, and Profile. + + 4. Service URL - The URL used to access this client. It must start with https:// + + 5. Authorized IPs - The valid IP address or addresses that apply to this client. + + 6. Actions - The actions you can take for this client. Click the icon for the action you want to perform. Editing client information, displaying the client secret, or deleting the client. + + **To add a new API client:** + + 1. Go to Administration > Enterprise Integration > API Clients and click New. + + 2. Enter a Name (required) and a Description (optional). + + 3. Enter the Client ID that was provided to you. + + 4. Select one or more Permissions from the dropdown list. + + 5. Enter one or more Authorized IP addresses for this client. Separate each address with a comma. + + 6. Click Save. + + When prompted, copy the string for the client's secret. You will need this information (along with the client ID) to authenticate to the API gateway. 
+ +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-LookoutCS-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Lookout Client ID**, **Lookout Client Secret**, **Lookout Base url**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-Lookout-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + LookoutClientId + LookoutApiSecret + Baseurl + WorkspaceID + PrimaryKey + logAnalyticsUri (Optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +4. 
Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `LookoutCloudSecurity_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/lookout.md b/Tools/Solutions Analyzer/connector-docs/solutions/lookout.md index d7763761589..067160fef0d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/lookout.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/lookout.md @@ -25,6 +25,21 @@ This solution provides **2 data connector(s)**. The [Lookout Mobile Threat Detection](https://lookout.com) data connector provides the capability to ingest events related to mobile security risks into Microsoft Sentinel through the Mobile Risk API. Refer to [API documentation](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide) for more information. This connector helps you examine potential security risks detected in mobile devices. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions on the workspace are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Lookout Mobile Threat Defence connector to Microsoft Sentinel** +Before connecting to Lookout, ensure the following prerequisites are completed. +#### 1. **ApiKey** is required for Mobile Threat Detection API. See the [documentation](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide) to learn more about API. Check all requirements and follow the [instructions](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#authenticatingwiththemobileriskapi) for obtaining credentials. 
+- **API key**: (password field) +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `LookoutMtdV2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/lumen-defender-threat-feed.md b/Tools/Solutions Analyzer/connector-docs/solutions/lumen-defender-threat-feed.md index a1ca794411f..d1e9da6cef7 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/lumen-defender-threat-feed.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/lumen-defender-threat-feed.md @@ -22,6 +22,75 @@ This solution provides **1 data connector(s)**. The [Lumen Defender Threat Feed](https://bll-analytics.mss.lumen.com/analytics) connector provides the capability to ingest STIX-formatted threat intelligence indicators from Lumen's Black Lotus Labs research team into Microsoft Sentinel. The connector automatically downloads and uploads daily threat intelligence indicators including IPv4 addresses and domains to the ThreatIntelIndicators table via the STIX Objects Upload API. +**Permissions:** + +**Resource Provider Permissions:** +- **Log Analytics Workspace** (Workspace): Read and write permissions on the Log Analytics workspace are required. + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Azure Entra App Registration**: An Azure Entra application registration with the Microsoft Sentinel Contributor role assigned is required for STIX Objects API access. [See the documentation to learn more about Azure Entra applications](https://docs.microsoft.com/azure/active-directory/develop/quickstart-register-app). +- **Microsoft Sentinel Contributor Role**: Microsoft Sentinel Contributor role is required for the Azure Entra application to upload threat intelligence indicators. 
+- **Lumen Defender Threat Feed API Key**: A Lumen Defender Threat Feed API Key is required for accessing threat intelligence data. [Contact Lumen for API access](mailto:DefenderThreatFeedSales@Lumen.com?subject=API%20Access%20Request). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions with Durable Functions to connect to the Lumen Defender Threat Feed API and upload threat intelligence indicators to Microsoft Sentinel via the STIX Objects API. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +**1. Configuration** + +**STEP 1 - Obtain Lumen Defender Threat Feed API Key** + +1. [Contact Lumen](mailto:DefenderThreatFeedSales@Lumen.com?subject=API%20Access%20Request) to obtain API access to our Threat Feed API service +2. Obtain your API key for authentication. + +**STEP 2 - Configure Azure Entra ID Application and gather information** + +1. Create an Entra application. [See the documentation for a guide to registering an application in Microsoft Entra ID.](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app) +2. Create a client secret and note the Application ID, Tenant ID, and Client Secret +4. Assign the **Microsoft Sentinel Contributor** role to the application on your Microsoft Sentinel Log Analytics Workspace +5. Make note of your Workspace ID, as well as the App Insights Workspace Resource ID, which can be obtained from the overview page of the Log Analytics Workspace for your Microsoft Sentinel instance. Click on the “JSON View” link in the top right and the Resource ID will be displayed at the top with a copy button. 
+- **Tenant ID**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**STEP 3 - Enable the Threat Intelligence Upload Indicators API (Preview) data connector in Microsoft Sentinel** + +1. Deploy the **Threat Intelligence (New) Solution**, which includes the **Threat Intelligence Upload Indicators API (Preview)** +2. Browse to the Content Hub, find and select the **Threat Intelligence (NEW)** solution. +3. Select the **Install/Update** button. + +**STEP 4 - Deploy the Azure Function** + +**IMPORTANT:** Before deploying the Lumen Defender Threat Feed connector, have the Tenant ID, Workspace ID, App Insights Workspace Resource ID, Azure Entra application details (Client ID, Client Secret), and Lumen API key readily available. + +1. Click the Deploy to Azure button. + +[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FLumen%20Defender%20Threat%20Feed%2FData%2520Connectors%2FLumenThreatFeed%2Fazuredeploy_Connector_LumenThreatFeed_AzureFunction.json) + +2. Fill in the appropriate values for each parameter: + +- Subscription: Confirm the correct subscription is selected or use the dropdown to change your selection +- Resource Group: Select the resource group to be used by the Function App and related resources +- Function Name: Enter a globally unique name with an 11-character limit. Adhere to your organization’s naming convention and ensure the name is globally unique since it is used (along with the uniqueString() function) to identify the ARM template being deployed. 
+- Workspace ID: Found in the "Overview" tab for the Log Analytics Workspace of the Microsoft Sentinel instance and provided for convenience on the connector information page. +- Lumen API Key: Obtain an API key through Lumen support +- Lumen Base URL: Filled in automatically and should generally not be changed. This URL contains API endpoints used by the connector +- Tenant ID: Obtained from the Entra App Registration overview page for the registered application (listed as Directory ID) and can also be obtained from the Tenant Information page in Azure +- Client ID: Obtained from the Entra App Registration overview page for the registered application (listed as Application ID) +- Client Secret: Obtained when the secret is created during the app registration process. It can only be viewed when first created and is hidden permanently afterwards. Rerun the app registration process to obtain a new Client Secret if necessary. +- App Insights Workspace Resource ID: Obtained from the overview page of the Log Analytics Workspace for your Microsoft Sentinel instance. Click on the "JSON View" link in the top right and the Resource ID will be displayed at the top with a copy button. +- Blob Container Name: Use the default name unless otherwise required. Azure Blob Storage is used for temporary storage and processing of threat indicators. + +**STEP 5 - Verify Deployment** + +1. The connector polls for indicator updates every 15 minutes. +2. Monitor the Function App logs in the Azure Portal to verify successful execution +3. After the app performs its first run, review the indicators ingested by either viewing the “Lumen Defender Threat Feed Overview” workbook or viewing the “Threat Intelligence” section in Microsoft Sentinel. In Microsoft Sentinel “Threat Intelligence”, filter for source “Lumen” to display only Lumen generated indicators. 
+ | | | |--------------------------|---| | **Tables Ingested** | `ThreatIntelIndicators` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/mailguard-365.md b/Tools/Solutions Analyzer/connector-docs/solutions/mailguard-365.md index 7fd2710f8aa..28c9a09cdce 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/mailguard-365.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/mailguard-365.md @@ -22,6 +22,28 @@ This solution provides **1 data connector(s)**. MailGuard 365 Enhanced Email Security for Microsoft 365. Exclusive to the Microsoft marketplace, MailGuard 365 is integrated with Microsoft 365 security (incl. Defender) for enhanced protection against advanced email threats like phishing, ransomware and sophisticated BEC attacks. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure and connect MailGuard 365** + +1. In the MailGuard 365 Console, click **Settings** on the navigation bar. +2. Click the **Integrations** tab. +3. Click the **Enable Microsoft Sentinel**. +4. Enter your workspace id and primary key from the fields below, click **Finish**. +5. For additional instructions, please contact MailGuard 365 support. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `MailGuard365_Threats_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/mailrisk.md b/Tools/Solutions Analyzer/connector-docs/solutions/mailrisk.md index d98848b6304..438734ad8cc 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/mailrisk.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/mailrisk.md @@ -22,6 +22,29 @@ This solution provides **1 data connector(s)**. The MailRisk by Secure Practice connector allows you to ingest email threat intelligence data from the MailRisk API into Microsoft Sentinel. This connector provides visibility into reported emails, risk assessments, and security events related to email threats. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **API credentials**: Your Secure Practice API key pair is also needed, which are created in the [settings in the admin portal](https://manage.securepractice.co/settings/security). Generate a new key pair with description `Microsoft Sentinel`. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Obtain Secure Practice API Credentials** + +Log in to your Secure Practice account and generate an API Key and API Secret if you haven't already. + +**2. Connect to MailRisk API** + +Enter your Secure Practice API credentials below. 
The credentials will be securely stored and used to authenticate API requests. +- **API Key**: Enter your Secure Practice API Key +- **API Secret**: (password field) +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `MailRiskEventEmails_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/mcafee-epolicy-orchestrator.md b/Tools/Solutions Analyzer/connector-docs/solutions/mcafee-epolicy-orchestrator.md index 6c92ac79288..7fe192ea9f5 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/mcafee-epolicy-orchestrator.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/mcafee-epolicy-orchestrator.md @@ -21,6 +21,47 @@ This solution provides **1 data connector(s)**. The McAfee ePolicy Orchestrator data connector provides the capability to ingest [McAfee ePO](https://www.mcafee.com/enterprise/en-us/products/epolicy-orchestrator.html) events into Microsoft Sentinel through the syslog. Refer to [documentation](https://docs.mcafee.com/bundle/epolicy-orchestrator-landing/page/GUID-0C40020F-5B7F-4549-B9CC-0E017BC8797F.html) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>This data connector depends on a parser based on a Kusto Function to work as expected [**McAfeeEPOEvent**](https://aka.ms/sentinel-McAfeeePO-parser) which is deployed with the Microsoft Sentinel Solution. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. 
+**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. +- **Open Syslog settings** + +**3. Configure McAfee ePolicy Orchestrator event forwarding to Syslog server** + +[Follow these instructions](https://kcm.trellix.com/corporate/index?page=content&id=KB87927) to register a syslog server. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/mcafee-network-security-platform.md b/Tools/Solutions Analyzer/connector-docs/solutions/mcafee-network-security-platform.md index 356459a6ece..daed117d34e 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/mcafee-network-security-platform.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/mcafee-network-security-platform.md @@ -21,6 +21,58 @@ This solution provides **1 data connector(s)**. The [McAfee® Network Security Platform](https://www.mcafee.com/enterprise/en-us/products/network-security-platform.html) data connector provides the capability to ingest [McAfee® Network Security Platform events](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-integration-guide-unmanaged/page/GUID-8C706BE9-6AC9-4641-8A53-8910B51207D8.html) into Microsoft Sentinel.
Refer to [McAfee® Network Security Platform](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-integration-guide-unmanaged/page/GUID-F7D281EC-1CC9-4962-A7A3-5A9D9584670E.html) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**McAfeeNSPEvent**](https://aka.ms/sentinel-mcafeensp-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using McAfee® Network Security Platform version: 10.1.x + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server where the McAfee® Network Security Platform logs are forwarded. + +> Logs from McAfee® Network Security Platform Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. 
+ - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure McAfee® Network Security Platform event forwarding** + +Follow the configuration steps below to get McAfee® Network Security Platform logs into Microsoft Sentinel. +1. [Follow these instructions](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-product-guide/page/GUID-E4A687B0-FAFB-4170-AC94-1D968A10380F.html) to forward alerts from the Manager to a syslog server. +2. Add a syslog notification profile, [more details here](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-product-guide/page/GUID-5BADD5D7-21AE-4E3B-AEE2-A079F3FD6A38.html). This is mandatory. While creating profile, to make sure that events are formatted correctly, enter the following text in the Message text box: + :|SENSOR_ALERT_UUID|ALERT_TYPE|ATTACK_TIME|ATTACK_NAME|ATTACK_ID + |ATTACK_SEVERITY|ATTACK_SIGNATURE|ATTACK_CONFIDENCE|ADMIN_DOMAIN|SENSOR_NAME|INTERFACE + |SOURCE_IP|SOURCE_PORT|DESTINATION_IP|DESTINATION_PORT|CATEGORY|SUB_CATEGORY + |DIRECTION|RESULT_STATUS|DETECTION_MECHANISM|APPLICATION_PROTOCOL|NETWORK_PROTOCOL| + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-365.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-365.md index 746a0d416c4..47c5c08bdcf 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-365.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-365.md @@ -13,33 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Microsoft 365 (formerly, Office 365)](../connectors/office365.md) - -**Publisher:** Microsoft - -The Microsoft 365 (formerly, Office 365) activity log connector provides insight into ongoing user activities. 
You will get details of operations such as file downloads, access requests sent, changes to group events, set-mailbox and details of the user who performed the actions. By connecting Microsoft 365 logs into Microsoft Sentinel you can use this data to view dashboards, create custom alerts, and improve your investigation process. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219943&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -| | | -|--------------------------|---| -| **Tables Ingested** | `OfficeActivity` | -| | `exchange` | -| | `sharePoint` | -| | `teams` | -| **Connector Definition Files** | [Microsoft365.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365/Data%20Connectors/Microsoft365.JSON) | - -[→ View full connector details](../connectors/office365.md) - -## Tables Reference - -This solution ingests data into **4 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `OfficeActivity` | [Microsoft 365 (formerly, Office 365)](../connectors/office365.md) | -| `exchange` | [Microsoft 365 (formerly, Office 365)](../connectors/office365.md) | -| `sharePoint` | [Microsoft 365 (formerly, Office 365)](../connectors/office365.md) | -| `teams` | [Microsoft 365 (formerly, Office 365)](../connectors/office365.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. 
[← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-business-applications.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-business-applications.md index 9955594226e..257fe5a23b3 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-business-applications.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-business-applications.md @@ -25,6 +25,83 @@ Dynamics 365 for Finance and Operations is a comprehensive Enterprise Resource P The Dynamics 365 Finance and Operations data connector ingests Dynamics 365 Finance and Operations admin activities and audit logs as well as user business process and application activities logs into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Microsoft Entra app registration**: Application client ID and secret used to access Dynamics 365 Finance and Operations. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>Connectivity to Finance and Operations requires a Microsoft Entra app registration (client ID and secret). You'll also need the Microsoft Entra tenant ID and the Finance Operations Organization URL. + +To enable data collection, create a role in Dynamics 365 Finance and Operations with permissions to view the Database Log entity. 
Assign this role to a dedicated Finance and Operations user, mapped to the client ID of a Microsoft Entra app registration. Follow these steps to complete the process: + +**2. Step 1 - Microsoft Entra app registration** + +1. Navigate to the [Microsoft Entra portal](https://entra.microsoft.com). +2. Under Applications, click on **App Registrations** and create a new app registration (leave all defaults). +3. Open the new app registration and create a new secret. +4. Retain the **Tenant ID**, **Application (client) ID**, and **Client secret** for later use. + +**3. Step 2 - Create a role for data collection in Finance and Operations** + +1. In the Finance and Operations portal, navigate to **Workspaces > System administration** and click **Security Configuration** +2. Under **Roles** click **Create new** and give the new role a name e.g. Database Log Viewer. +3. Select the new role in the list of roles and click **Privileges** and then **Add references**. +4. Select **Database log Entity View** from the list of privileges. +5. Click on **Unpublished objects** and then **Publish all** to publish the role. + +**4. Step 3 - Create a user for data collection in Finance and Operations** + +1. In the Finance and Operations portal, navigate to **Modules > System administration** and click **Users** +2. Create a new user and assign the role created in the previous step to the user. + +**5. Step 4 - Register the Microsoft Entra app in Finance and Operations** + +1. In the F&O portal, navigate to **System administration > Setup > Microsoft Entra applications** (Azure Active Directory applications) +2. Create a new entry in the table. In the **Client Id** field, enter the application ID of the app registered in Step 1. +3. In the **Name** field, enter a name for the application. +4. In the **User ID** field, select the user ID created in the previous step. + +**6.
Connect events from Dynamics 365 Finance and Operations to Microsoft Sentinel** + +Connect using client credentials +**Dynamics 365 Finance and Operations connection** + +When you click the "Add environment" button in the portal, a configuration form will open. You'll need to provide: + +*Environment details* + +- **Microsoft Entra tenant ID.** (optional): Tenant ID (GUID) +- **App registration client ID** (optional): Finance and Operations client ID +- **App registration client secret** (optional): Finance and Operations client secret +- **Finance and Operations organization URL** (optional): https://dynamics-dev.axcloud.dynamics.com + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + + +**7. Organizations** + +Each row represents a Finance and Operations connection +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Environment URL** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation.
+ | | | |--------------------------|---| | **Tables Ingested** | `FinanceOperationsActivity_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-copilot.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-copilot.md index c0fe4513257..f52a179d973 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-copilot.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-copilot.md @@ -21,6 +21,23 @@ This solution provides **1 data connector(s)**. The Microsoft Copilot logs connector in Microsoft Sentinel enables the seamless ingestion of Copilot-generated activity logs into Microsoft Sentinel for advanced threat detection, investigation, and response. It collects telemetry from Microsoft Copilot services - such as usage data, prompts and system responses - and ingests into Microsoft Sentinel, allowing security teams to monitor for misuse, detect anomalies, and maintain compliance with organizational policies. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **Tenant Permissions**: 'Security Administrator' or 'Global Administrator' on the workspace's tenant. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Microsoft Copilot audit logs to Microsoft Sentinel** + +This connector uses the Office Management API to get your Microsoft Copilot audit logs. The logs will be stored and processed in your existing Microsoft Sentinel workspace. You can find the data in the **LLMActivity** table. 
+- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `LLMActivity` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-cloud-apps.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-cloud-apps.md index d96e9c3eda1..29ad76c629e 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-cloud-apps.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-cloud-apps.md @@ -13,47 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Microsoft Defender for Cloud Apps](../connectors/microsoftcloudappsecurity.md) - -**Publisher:** Microsoft - -By connecting with [Microsoft Defender for Cloud Apps](https://aka.ms/asi-mcas-connector-description) you will gain visibility into your cloud apps, get sophisticated analytics to identify and combat cyberthreats, and control how your data travels. - - - -- Identify shadow IT cloud apps on your network. - -- Control and limit access based on conditions and session context. - -- Use built-in or custom policies for data sharing and data loss prevention. - -- Identify high-risk use and get alerts for unusual user activities with Microsoft behavioral analytics and anomaly detection capabilities, including ransomware activity, impossible travel, suspicious email forwarding rules, and mass download of files. 
- -- Mass download of files - - - -[Deploy now >](https://aka.ms/asi-mcas-connector-deploynow) - -| | | -|--------------------------|---| -| **Tables Ingested** | `McasShadowItReporting` | -| | `SecurityAlert` | -| | `discoveryLogs` | -| **Connector Definition Files** | [MicrosoftCloudAppSecurity.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps/Data%20Connectors/MicrosoftCloudAppSecurity.JSON) | - -[→ View full connector details](../connectors/microsoftcloudappsecurity.md) - -## Tables Reference - -This solution ingests data into **3 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `McasShadowItReporting` | [Microsoft Defender for Cloud Apps](../connectors/microsoftcloudappsecurity.md) | -| `SecurityAlert` | [Microsoft Defender for Cloud Apps](../connectors/microsoftcloudappsecurity.md) | -| `discoveryLogs` | [Microsoft Defender for Cloud Apps](../connectors/microsoftcloudappsecurity.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-cloud.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-cloud.md index faa22a46571..f5c7b68dfd0 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-cloud.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-cloud.md @@ -13,11 +13,7 @@ ## Data Connectors -This solution provides **2 data connector(s)**. - -### [Subscription-based Microsoft Defender for Cloud (Legacy)](../connectors/azuresecuritycenter.md) - -**Publisher:** Microsoft +This solution provides **1 data connector(s)**. ### [Tenant-based Microsoft Defender for Cloud](../connectors/microsoftdefenderforcloudtenantbased.md) @@ -25,6 +21,25 @@ This solution provides **2 data connector(s)**. 
Microsoft Defender for Cloud is a security management tool that allows you to detect and quickly respond to threats across Azure, hybrid, and multi-cloud workloads. This connector allows you to stream your MDC security alerts from Microsoft 365 Defender into Microsoft Sentinel, so you can can leverage the advantages of XDR correlations connecting the dots across your cloud resources, devices and identities and view the data in workbooks, queries and investigate and respond to incidents. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269832&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Tenant Permissions:** +Requires SecurityAdmin, GlobalAdmin on the workspace's tenant + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Tenant-based Microsoft Defender for Cloud to Microsoft Sentinel** + +After connecting this connector, **all** your Microsoft Defender for Cloud subscriptions' alerts will be sent to this Microsoft Sentinel workspace. + +> Your Microsoft Defender for Cloud alerts are connected to stream through the Microsoft 365 Defender. To benefit from automated grouping of the alerts into incidents, connect the Microsoft 365 Defender incidents connector. Incidents can be viewed in the incidents queue. 
+Tenant-based Microsoft Defender for Cloud + | | | |--------------------------|---| | **Tables Ingested** | `SecurityAlert` | @@ -38,6 +53,6 @@ This solution ingests data into **1 table(s)**: | Table | Used By Connectors | |-------|-------------------| -| `SecurityAlert` | [Subscription-based Microsoft Defender for Cloud (Legacy)](../connectors/azuresecuritycenter.md), [Tenant-based Microsoft Defender for Cloud](../connectors/microsoftdefenderforcloudtenantbased.md) | +| `SecurityAlert` | [Tenant-based Microsoft Defender for Cloud](../connectors/microsoftdefenderforcloudtenantbased.md) | [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-identity.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-identity.md index 8dcf6f3876e..1e50dc31c92 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-identity.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-identity.md @@ -13,49 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Microsoft Defender for Identity](../connectors/azureadvancedthreatprotection.md) - -**Publisher:** Microsoft - -Connect Microsoft Defender for Identity to gain visibility into the events and user analytics. Microsoft Defender for Identity identifies, detects, and helps you investigate advanced threats, compromised identities, and malicious insider actions directed at your organization. 
Microsoft Defender for Identity enables SecOp analysts and security professionals struggling to detect advanced attacks in hybrid environments to: - - - -- Monitor users, entity behavior, and activities with learning-based analytics​ - -- Protect user identities and credentials stored in Active Directory - -- Identify and investigate suspicious user activities and advanced attacks throughout the kill chain - -- Provide clear incident information on a simple timeline for fast triage - - - -[Try now >](https://aka.ms/AtpTryNow) - - - -[Deploy now >](https://aka.ms/AzureATP_Deploy) - - - -For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2220069&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -| | | -|--------------------------|---| -| **Tables Ingested** | `SecurityAlert` | -| **Connector Definition Files** | [MicrosoftDefenderforIdentity.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20For%20Identity/Data%20Connectors/MicrosoftDefenderforIdentity.JSON) | - -[→ View full connector details](../connectors/azureadvancedthreatprotection.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `SecurityAlert` | [Microsoft Defender for Identity](../connectors/azureadvancedthreatprotection.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. 
[← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-office-365.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-office-365.md index 5eb159b8514..9f5fc5e607d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-office-365.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-for-office-365.md @@ -47,6 +47,26 @@ These alerts can be seen by Office customers in the ** Office Security and Compl For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219942&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Licenses:** +- Office Advanced Threat Protection + +**Tenant Permissions:** +Requires GlobalAdmin, SecurityAdmin on the workspace's tenant + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Microsoft Defender for Office 365 alerts to Microsoft Sentinel** + +Connecting Microsoft Defender for Office 365 will cause your data that is collected by Microsoft Defender for Office 365 service to be stored and processed in the location that you have configured your Microsoft Sentinel workspace. 
+- Connect Microsoft Defender for Office 365 + | | | |--------------------------|---| | **Tables Ingested** | `SecurityAlert` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-xdr.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-xdr.md index b419b9ef0ed..cb85ee51210 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-xdr.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-defender-xdr.md @@ -13,87 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) - -**Publisher:** Microsoft - -Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats. - - - -Microsoft Defender XDR suite includes: - -- Microsoft Defender for Endpoint - -- Microsoft Defender for Identity - -- Microsoft Defender for Office 365 - -- Threat & Vulnerability Management - -- Microsoft Defender for Cloud Apps - - - -For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
- -| | | -|--------------------------|---| -| **Tables Ingested** | `AlertEvidence` | -| | `CloudAppEvents` | -| | `DeviceEvents` | -| | `DeviceFileCertificateInfo` | -| | `DeviceFileEvents` | -| | `DeviceImageLoadEvents` | -| | `DeviceInfo` | -| | `DeviceLogonEvents` | -| | `DeviceNetworkEvents` | -| | `DeviceNetworkInfo` | -| | `DeviceProcessEvents` | -| | `DeviceRegistryEvents` | -| | `EmailAttachmentInfo` | -| | `EmailEvents` | -| | `EmailPostDeliveryEvents` | -| | `EmailUrlInfo` | -| | `IdentityDirectoryEvents` | -| | `IdentityLogonEvents` | -| | `IdentityQueryEvents` | -| | `SecurityAlert` | -| | `SecurityIncident` | -| | `UrlClickEvents` | -| **Connector Definition Files** | [MicrosoftThreatProtection.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON) | - -[→ View full connector details](../connectors/microsoftthreatprotection.md) - -## Tables Reference - -This solution ingests data into **22 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AlertEvidence` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `CloudAppEvents` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `DeviceEvents` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `DeviceFileCertificateInfo` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `DeviceFileEvents` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `DeviceImageLoadEvents` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `DeviceInfo` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `DeviceLogonEvents` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `DeviceNetworkEvents` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `DeviceNetworkInfo` | [Microsoft Defender 
XDR](../connectors/microsoftthreatprotection.md) | -| `DeviceProcessEvents` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `DeviceRegistryEvents` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `EmailAttachmentInfo` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `EmailEvents` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `EmailPostDeliveryEvents` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `EmailUrlInfo` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `IdentityDirectoryEvents` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `IdentityLogonEvents` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `IdentityQueryEvents` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `SecurityAlert` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `SecurityIncident` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | -| `UrlClickEvents` | [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-entra-id-protection.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-entra-id-protection.md index f1b0cc8ef85..78e9919809c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-entra-id-protection.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-entra-id-protection.md @@ -13,31 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. 
+**This solution does not include data connectors.** -### [Microsoft Entra ID Protection](../connectors/azureactivedirectoryidentityprotection.md) - -**Publisher:** Microsoft - -Microsoft Entra ID Protection provides a consolidated view at risk users, risk events and vulnerabilities, with the ability to remediate risk immediately, and set policies to auto-remediate future events. The service is built on Microsoft’s experience protecting consumer identities and gains tremendous accuracy from the signal from over 13 billion logins a day. Integrate Microsoft Microsoft Entra ID Protection alerts with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. For more information, see the [Microsoft Sentinel documentation ](https://go.microsoft.com/fwlink/p/?linkid=2220065&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - - - -[Get Microsoft Entra ID Premium P1/P2 ](https://aka.ms/asi-ipcconnectorgetlink) - -| | | -|--------------------------|---| -| **Tables Ingested** | `SecurityAlert` | -| **Connector Definition Files** | [template_AzureActiveDirectoryIdentityProtection.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Protection/Data%20Connectors/template_AzureActiveDirectoryIdentityProtection.JSON) | - -[→ View full connector details](../connectors/azureactivedirectoryidentityprotection.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `SecurityAlert` | [Microsoft Entra ID Protection](../connectors/azureactivedirectoryidentityprotection.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. 
[← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-entra-id.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-entra-id.md index afbcdf71263..9b87eccffb0 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-entra-id.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-entra-id.md @@ -13,69 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Microsoft Entra ID](../connectors/azureactivedirectory.md) - -**Publisher:** Microsoft - -Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
- -| | | -|--------------------------|---| -| **Tables Ingested** | `AADManagedIdentitySignInLogs` | -| | `AADNonInteractiveUserSignInLogs` | -| | `AADProvisioningLogs` | -| | `AADRiskyServicePrincipals` | -| | `AADRiskyUsers` | -| | `AADServicePrincipalRiskEvents` | -| | `AADServicePrincipalSignInLogs` | -| | `AADUserRiskEvents` | -| | `ADFSSignInLogs` | -| | `AuditLogs` | -| | `ManagedIdentitySignInLogs` | -| | `NetworkAccessTraffic` | -| | `NetworkAccessTrafficLogs` | -| | `NonInteractiveUserSignInLogs` | -| | `ProvisioningLogs` | -| | `RiskyServicePrincipals` | -| | `RiskyUsers` | -| | `ServicePrincipalRiskEvents` | -| | `ServicePrincipalSignInLogs` | -| | `SignInLogs` | -| | `SigninLogs` | -| | `UserRiskEvents` | -| **Connector Definition Files** | [template_AzureActiveDirectory.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON) | - -[→ View full connector details](../connectors/azureactivedirectory.md) - -## Tables Reference - -This solution ingests data into **22 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `AADManagedIdentitySignInLogs` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `AADNonInteractiveUserSignInLogs` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `AADProvisioningLogs` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `AADRiskyServicePrincipals` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `AADRiskyUsers` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `AADServicePrincipalRiskEvents` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `AADServicePrincipalSignInLogs` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `AADUserRiskEvents` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `ADFSSignInLogs` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| 
`AuditLogs` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `ManagedIdentitySignInLogs` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `NetworkAccessTraffic` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `NetworkAccessTrafficLogs` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `NonInteractiveUserSignInLogs` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `ProvisioningLogs` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `RiskyServicePrincipals` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `RiskyUsers` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `ServicePrincipalRiskEvents` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `ServicePrincipalSignInLogs` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `SignInLogs` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `SigninLogs` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | -| `UserRiskEvents` | [Microsoft Entra ID](../connectors/azureactivedirectory.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-exchange-security---exchange-on-premises.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-exchange-security---exchange-on-premises.md index 8a1367a34a8..d74de4c2ee7 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-exchange-security---exchange-on-premises.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-exchange-security---exchange-on-premises.md @@ -49,6 +49,425 @@ This solution provides **8 data connector(s)**. 
[Option 7] - Using Azure Monitor Agent - You can stream HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you create custom alerts, and improve investigation. [Learn more](https://aka.ms/ESI_DataConnectorOptions) +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Log Analytics will be deprecated**: Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc) +- **Detailed documentation**: >**NOTE:** Detailed documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This solution is based on options. This allows you to choose which data will be ingested as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each option is independent from the others.
To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions) + +>This Data Connector is the **option 7** of the wiki. + +**1. Download and install the agents needed to collect logs for Microsoft Sentinel** + +Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy. +**Deploy Monitor Agents** + + This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers +**Deploy the Azure Arc Agent** +> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc) + +**2. [Option 7] HTTP Proxy of Exchange Servers** + +Select how to stream HTTP Proxy of Exchange Servers +**Data Collection Rules - When Azure Monitor Agent is used** + + **Enable data collection rule** +> Message Tracking are collected only from **Windows** agents. +**Option 1 - Azure Resource Manager (ARM) Template (Preferred Method)** + + Use this method for automated deployment of the DCE and DCR. +**A. Create DCE (If not already created for Exchange Servers)** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. You can change the proposed name of the DCE. +5. Click **Create** to deploy. + + **B. Deploy Data Connection Rule** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID** 'and/or Other required fields'. +>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5.
Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Automation** + + Use the following step-by-step instructions to deploy manually a Data Collection Rule. +**Create Custom Table - Explanation** + + The Custom Table can't be created using the Azure Portal. You need to use an ARM template, a PowerShell Script or another method [described here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/create-custom-table?tabs=azure-powershell-1%2Cazure-portal-2%2Cazure-portal-3#create-a-custom-table). + + **Create Custom Table using an ARM Template** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-HTTPProxyCustomTable) +2. Select the preferred **Subscription**, **Resource Group**, **Location** and **Analytic Workspace Name**. +3. Click **Create** to deploy. + + **Create Custom Table using PowerShell in Cloud Shell** + + 1. From the Azure Portal, open a Cloud Shell. +2. Copy and paste and Execute the following script in the Cloud Shell to create the table. 
+ $tableParams = @' + { + "properties": { + "schema": { + "name": "ExchangeHttpProxy_CL", + "columns": [ + { + "name": "AccountForestLatencyBreakup", + "type": "string" + }, + { + "name": "ActivityContextLifeTime", + "type": "string" + }, + { + "name": "ADLatency", + "type": "string" + }, + { + "name": "AnchorMailbox", + "type": "string" + }, + { + "name": "AuthenticatedUser", + "type": "string" + }, + { + "name": "AuthenticationType", + "type": "string" + }, + { + "name": "AuthModulePerfContext", + "type": "string" + }, + { + "name": "BackEndCookie", + "type": "string" + }, + { + "name": "BackEndGenericInfo", + "type": "string" + }, + { + "name": "BackendProcessingLatency", + "type": "string" + }, + { + "name": "BackendReqInitLatency", + "type": "string" + }, + { + "name": "BackendReqStreamLatency", + "type": "string" + }, + { + "name": "BackendRespInitLatency", + "type": "string" + }, + { + "name": "BackendRespStreamLatency", + "type": "string" + }, + { + "name": "BackEndStatus", + "type": "string" + }, + { + "name": "BuildVersion", + "type": "string" + }, + { + "name": "CalculateTargetBackEndLatency", + "type": "string" + }, + { + "name": "ClientIpAddress", + "type": "string" + }, + { + "name": "ClientReqStreamLatency", + "type": "string" + }, + { + "name": "ClientRequestId", + "type": "string" + }, + { + "name": "ClientRespStreamLatency", + "type": "string" + }, + { + "name": "CoreLatency", + "type": "string" + }, + { + "name": "DatabaseGuid", + "type": "string" + }, + { + "name": "EdgeTraceId", + "type": "string" + }, + { + "name": "ErrorCode", + "type": "string" + }, + { + "name": "GenericErrors", + "type": "string" + }, + { + "name": "GenericInfo", + "type": "string" + }, + { + "name": "GlsLatencyBreakup", + "type": "string" + }, + { + "name": "HandlerCompletionLatency", + "type": "string" + }, + { + "name": "HandlerToModuleSwitchingLatency", + "type": "string" + }, + { + "name": "HttpPipelineLatency", + "type": "string" + }, + { + "name": 
"HttpProxyOverhead", + "type": "string" + }, + { + "name": "HttpStatus", + "type": "string" + }, + { + "name": "IsAuthenticated", + "type": "string" + }, + { + "name": "KerberosAuthHeaderLatency", + "type": "string" + }, + { + "name": "MajorVersion", + "type": "string" + }, + { + "name": "Method", + "type": "string" + }, + { + "name": "MinorVersion", + "type": "string" + }, + { + "name": "ModuleToHandlerSwitchingLatency", + "type": "string" + }, + { + "name": "Organization", + "type": "string" + }, + { + "name": "PartitionEndpointLookupLatency", + "type": "string" + }, + { + "name": "Protocol", + "type": "string" + }, + { + "name": "ProtocolAction", + "type": "string" + }, + { + "name": "ProxyAction", + "type": "string" + }, + { + "name": "ProxyTime", + "type": "string" + }, + { + "name": "RequestBytes", + "type": "string" + }, + { + "name": "RequestHandlerLatency", + "type": "string" + }, + { + "name": "RequestId", + "type": "string" + }, + { + "name": "ResourceForestLatencyBreakup", + "type": "string" + }, + { + "name": "ResponseBytes", + "type": "string" + }, + { + "name": "RevisionVersion", + "type": "string" + }, + { + "name": "RouteRefresherLatency", + "type": "string" + }, + { + "name": "RoutingHint", + "type": "string" + }, + { + "name": "RoutingLatency", + "type": "string" + }, + { + "name": "RoutingStatus", + "type": "string" + }, + { + "name": "RoutingType", + "type": "string" + }, + { + "name": "ServerHostName", + "type": "string" + }, + { + "name": "ServerLocatorHost", + "type": "string" + }, + { + "name": "ServerLocatorLatency", + "type": "string" + }, + { + "name": "SharedCacheLatencyBreakup", + "type": "string" + }, + { + "name": "TargetOutstandingRequests", + "type": "string" + }, + { + "name": "TargetServer", + "type": "string" + }, + { + "name": "TargetServerVersion", + "type": "string" + }, + { + "name": "TotalAccountForestLatency", + "type": "string" + }, + { + "name": "TotalGlsLatency", + "type": "string" + }, + { + "name": "TotalRequestTime", 
+ "type": "string" + }, + { + "name": "TotalResourceForestLatency", + "type": "string" + }, + { + "name": "TotalSharedCacheLatency", + "type": "string" + }, + { + "name": "UrlHost", + "type": "string" + }, + { + "name": "UrlQuery", + "type": "string" + }, + { + "name": "UrlStem", + "type": "string" + }, + { + "name": "UserADObjectGuid", + "type": "string" + }, + { + "name": "UserAgent", + "type": "string" + }, + { + "name": "TimeGenerated", + "type": "datetime" + }, + { + "name": "FilePath", + "type": "string" + } + ] + } + } + } + '@ +3. Copy, Replace, Paste and execute the following parameters with your own values: + $SubscriptionID = 'YourGUID' + $ResourceGroupName = 'YourResourceGroupName' + $WorkspaceName = 'YourWorkspaceName' +4. Execute the Following Cmdlet to create the table: + Invoke-AzRestMethod -Path "/subscriptions/$SubscriptionID/resourcegroups/$ResourceGroupName/providers/microsoft.operationalinsights/workspaces/$WorkspaceName/tables/ExchangeHttpProxy_CL?api-version=2021-12-01-preview" -Method PUT -payload $tableParams +**A. Create DCE (If not already created for Exchange Servers)** + + 1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints). +2. Click **+ Create** at the top. +3. In the **Basics** tab, fill the required fields and give a name to the DCE. +3. 'Make other preferable configuration changes', if needed, then click **Create**. + + **B. Create a DCR, Type Custom log** + + 1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules). +2. Click on 'Create' button. +3. On 'Basics' tab, fill the Rule name like **DCR-Option7-HTTPProxyLogs**, select the 'Data Collection Endpoint' with the previously created endpoint and fill other parameters. +4. In the **Resources** tab, add your Exchange Servers. +5. 
In **Collect and Deliver**, add a Data Source type 'Custom Text logs' and enter the following file pattern : + 'C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Autodiscover\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Eas\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Ecp\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Ews\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Mapi\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Oab\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\Owa\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\OwaCalendar\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\PowerShell\*.log','C:\Program Files\Microsoft\Exchange Server\V15\Logging\HttpProxy\RpcHttp\*.log' +6. Put 'ExchangeHttpProxy_CL' in Table Name. +7. in Transform field, enter the following KQL request : + source | extend d = split(RawData,',') | extend DateTime=todatetime(d[0]),RequestId=tostring(d[1]) ,MajorVersion=tostring(d[2]) ,MinorVersion=tostring(d[3]) ,BuildVersion=tostring(d[4]) ,RevisionVersion=tostring(d[5]) ,ClientRequestId=tostring(d[6]) ,Protocol=tostring(d[7]) ,UrlHost=tostring(d[8]) ,UrlStem=tostring(d[9]) ,ProtocolAction=tostring(d[10]) ,AuthenticationType=tostring(d[11]) ,IsAuthenticated=tostring(d[12]) ,AuthenticatedUser=tostring(d[13]) ,Organization=tostring(d[14]) ,AnchorMailbox=tostring(d[15]) ,UserAgent=tostring(d[16]) ,ClientIpAddress=tostring(d[17]) ,ServerHostName=tostring(d[18]) ,HttpStatus=tostring(d[19]) ,BackEndStatus=tostring(d[20]) ,ErrorCode=tostring(d[21]) ,Method=tostring(d[22]) ,ProxyAction=tostring(d[23]) ,TargetServer=tostring(d[24]) ,TargetServerVersion=tostring(d[25]) ,RoutingType=tostring(d[26]) ,RoutingHint=tostring(d[27]) ,BackEndCookie=tostring(d[28]) ,ServerLocatorHost=tostring(d[29]) ,ServerLocatorLatency=tostring(d[30]) 
,RequestBytes=tostring(d[31]) ,ResponseBytes=tostring(d[32]) ,TargetOutstandingRequests=tostring(d[33]) ,AuthModulePerfContext=tostring(d[34]) ,HttpPipelineLatency=tostring(d[35]) ,CalculateTargetBackEndLatency=tostring(d[36]) ,GlsLatencyBreakup=tostring(d[37]) ,TotalGlsLatency=tostring(d[38]) ,AccountForestLatencyBreakup=tostring(d[39]) ,TotalAccountForestLatency=tostring(d[40]) ,ResourceForestLatencyBreakup=tostring(d[41]) ,TotalResourceForestLatency=tostring(d[42]) ,ADLatency=tostring(d[43]) ,SharedCacheLatencyBreakup=tostring(d[44]) ,TotalSharedCacheLatency=tostring(d[45]) ,ActivityContextLifeTime=tostring(d[46]) ,ModuleToHandlerSwitchingLatency=tostring(d[47]) ,ClientReqStreamLatency=tostring(d[48]) ,BackendReqInitLatency=tostring(d[49]) ,BackendReqStreamLatency=tostring(d[50]) ,BackendProcessingLatency=tostring(d[51]) ,BackendRespInitLatency=tostring(d[52]) ,BackendRespStreamLatency=tostring(d[53]) ,ClientRespStreamLatency=tostring(d[54]) ,KerberosAuthHeaderLatency=tostring(d[55]) ,HandlerCompletionLatency=tostring(d[56]) ,RequestHandlerLatency=tostring(d[57]) ,HandlerToModuleSwitchingLatency=tostring(d[58]) ,ProxyTime=tostring(d[59]) ,CoreLatency=tostring(d[60]) ,RoutingLatency=tostring(d[61]) ,HttpProxyOverhead=tostring(d[62]) ,TotalRequestTime=tostring(d[63]) ,RouteRefresherLatency=tostring(d[64]) ,UrlQuery=tostring(d[65]) ,BackEndGenericInfo=tostring(d[66]) ,GenericInfo=tostring(d[67]) ,GenericErrors=tostring(d[68]) ,EdgeTraceId=tostring(d[69]) ,DatabaseGuid=tostring(d[70]) ,UserADObjectGuid=tostring(d[71]) ,PartitionEndpointLookupLatency=tostring(d[72]) ,RoutingStatus=tostring(d[73]) | extend TimeGenerated = DateTime | project-away d,RawData,DateTime | project-away d,RawData,DateTime + and click on 'Destination'. +8. In 'Destination', add a destination and select the Workspace where you have previously created the Custom Table +9. Click on 'Add data source'. +10. 
Fill other required parameters and tags and create the DCR + + **Assign the DCR to all Exchange Servers** + + Add all your Exchange Servers to the DCR + | | | |--------------------------|---| | **Tables Ingested** | `ExchangeHttpProxy_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-exchange-security---exchange-online.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-exchange-security---exchange-online.md index a2e65455ce6..a7b841fac99 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-exchange-security---exchange-online.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-exchange-security---exchange-online.md @@ -21,6 +21,161 @@ This solution provides **1 data connector(s)**. Connector used to push Exchange Online Security configuration for Microsoft Sentinel Analysis +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **microsoft.automation/automationaccounts permissions**: Read and write permissions to create an Azure Automation with a Runbook is required. [See the documentation to learn more about Automation Account](https://learn.microsoft.com/en-us/azure/automation/overview). +- **Microsoft.Graph permissions**: Groups.Read, Users.Read and Auditing.Read permissions are required to retrieve user/group information linked to Exchange Online assignments. 
[See the documentation to learn more](https://aka.ms/sentinel-ESI-OnlineCollectorPermissions). +- **Exchange Online permissions**: Exchange.ManageAsApp permission and **Global Reader** or **Security Reader** Role are needed to retrieve the Exchange Online Security Configuration.[See the documentation to learn more](https://aka.ms/sentinel-ESI-OnlineCollectorPermissions). +- **(Optional) Log Storage permissions**: Storage Blob Data Contributor to a storage account linked to the Automation Account Managed identity or an Application ID is mandatory to store logs.[See the documentation to learn more](https://aka.ms/sentinel-ESI-OnlineCollectorPermissions). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE - UPDATE** + +ℹ️

NOTE - UPDATE:

We recommend updating the Collector to Version 7.6.0.0 or higher.
The Collector Script Update procedure could be found here : ESI Online Collector Update + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Follow the steps for each Parser to create the Kusto Functions alias : [**ExchangeConfiguration**](https://aka.ms/sentinel-ESI-ExchangeConfiguration-Online-parser) and [**ExchangeEnvironmentList**](https://aka.ms/sentinel-ESI-ExchangeEnvironmentList-Online-parser) + +**STEP 1 - Parsers deployment** +**Parser deployment (When using Microsoft Exchange Security Solution, Parsers are automatically deployed)** + +**1. Download the Parser files** + + The latest version of the 2 files [**ExchangeConfiguration.yaml**](https://aka.ms/sentinel-ESI-ExchangeConfiguration-Online-parser) and [**ExchangeEnvironmentList.yaml**](https://aka.ms/sentinel-ESI-ExchangeEnvironmentList-Online-parser) + + **2. Create Parser **ExchangeConfiguration** function** + + In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer + + **3. Save Parser **ExchangeConfiguration** function** + + Click on save button. + Define the parameters as asked on the header of the parser file. +Click save again. + + **4. Reproduce the same steps for Parser **ExchangeEnvironmentList**** + + Reproduce the step 2 and 3 with the content of 'ExchangeEnvironmentList.yaml' file + +>**NOTE:** This connector uses Azure Automation to connect to 'Exchange Online' to pull its Security analysis into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Automation pricing page](https://azure.microsoft.com/pricing/details/automation/) for details. 
+ +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Automation** + +>**IMPORTANT:** Before deploying the 'ESI Exchange Online Security Configuration' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Exchange Online tenant name (contoso.onmicrosoft.com), readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the 'ESI Exchange Online Security Configuration' connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-ExchangeCollector-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **Tenant Name**, 'and/or Other required fields'. +>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**Option 2 - Manual Deployment of Azure Automation** + + Use the following step-by-step instructions to deploy the 'ESI Exchange Online Security Configuration' connector manually with Azure Automation. +**A. Create the Azure Automation Account** + + 1. From the Azure Portal, navigate to [Azure Automation Account](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.Automation%2FAutomationAccounts). +2. Click **+ Add** at the top. +3. In the **Basics** tab, fill the required fields and give a name to the Azure Automation. +4. 
In the **Advanced** and **Networking** and **Tags** Tabs, leave fields as default if you don't need to customize them. +5. 'Make other preferable configuration changes', if needed, then click **Create**. + + **B. Add Exchange Online Management Module, Microsoft Graph (Authentication, User and Group) Modules** + + 1. On the Automation Account page, select **Modules**. +2. Click on **Browse gallery** and search the **ExchangeOnlineManagement** module. +3. Select it and click on **Select**. +4. Choose Version **5.1** on Runtime version field and click on Import button. +Repeat the step for the following modules : 'Microsoft.Graph.Authentication', 'Microsoft.Graph.Users' and 'Microsoft.Graph.Groups'. **Attention, you need to wait for Microsoft.Graph.Authentication installation before processing next modules** + + **C. Download the Runbook Content** + + 1. Download the latest version of ESI Collector. The latest version can be found here : https://aka.ms/ESI-ExchangeCollector-Script +2. Unzip the file to find the JSON file and the PS1 file for next step. + + **D. Create Runbook** + + 1. On the Automation Account page, select the **Runbooks** button. +2. Click on **Create a runbook** and name it like 'ESI-Collector' with a runbook type **PowerShell**, Runtime Version **5.1** and click 'Create'. +2. Import the content of the previous step's PS1 file in the Runbook window. +3. Click on **Publish** + + **E. Create GlobalConfiguration Variable** + + 1. On the Automation Account page, select the **Variables** button. +2. Click on **Add a Variable** and name it exactly 'GlobalConfiguration' with a type **String**. +2. On 'Value' field, copy the content of the previous step's JSON file. +3. Inside the content, replace the values of **WorkspaceID** and **WorkspaceKey**. +4. Click on 'Create' button. + + **F. Create TenantName Variable** + + 1. On the Automation Account page, select the **Variables** button. +2.
Click on **Add a Variable** and name it exactly 'TenantName' with a type **String**. +3. On 'Value' field, write the tenant name of your Exchange Online. +4. Click on 'Create' button. + + **G. Create LastDateTracking Variable** + + 1. On the Automation Account page, select the **Variables** button. +2. Click on **Add a Variable** and name it exactly 'LastDateTracking' with a type **String**. +3. On 'Value' field, write 'Never'. +4. Click on 'Create' button. + + **H. Create a Runbook Schedule** + + 1. On the Automation Account page, select the **Runbook** button and click on your created runbook. +2. Click on **Schedules** and **Add a schedule** button. +3. Click on **Schedule**, **Add a Schedule** and name it. Select **Recurring** value with a recurrence of every 1 day, click 'Create'. +4. Click on 'Configure parameters and run settings'. Leave all empty and click on **OK** and **OK** again. + +**STEP 3 - Assign Microsoft Graph Permission and Exchange Online Permission to Managed Identity Account** + +To be able to collect Exchange Online information and to be able to retrieve User information and memberlist of admin groups, the automation account needs multiple permissions. +**Assign Permissions by Script** + +**A. Download Permission Script** + + [Permission Update script](https://aka.ms/ESI-ExchangeCollector-Permissions) + + **B. Retrieve the Azure Automation Managed Identity GUID and insert it in the downloaded script** + + 1. Go to your Automation Account, in the **Identity** Section. You can find the Guid of your Managed Identity. +2. Replace the GUID in $MI_ID = "XXXXXXXXXXX" with the GUID of your Managed Identity. + + **C. Launch the script with a **Global-Administrator** account** + + **Attention this script requires MSGraph Modules and Admin Consent to access to your tenant with Microsoft Graph**. + The script will add 3 permissions to the Managed identity: + 1. Exchange Online ManageAsApp permission + 2. User.Read.All on Microsoft Graph API + 3.
Group.Read.All on Microsoft Graph API + + **D. Exchange Online Role Assignment** + + 1. As a **Global Administrator**, go to **Roles and Administrators**. +2. Select **Global Reader** role or **Security Reader** and click to 'Add assignments'. +3. Click on 'No member selected' and search your Managed Identity account Name beginning by **the name of your automation account** like 'ESI-Collector'. Select it and click on 'Select'. +4. Click **Next** and validate the assignment by clicking **Assign**. + | | | |--------------------------|---| | **Tables Ingested** | `ESIExchangeOnlineConfig_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-powerbi.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-powerbi.md index 6e023b31f2a..50879d9d452 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-powerbi.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-powerbi.md @@ -21,6 +21,26 @@ This solution provides **1 data connector(s)**. Microsoft PowerBI is a collection of software services, apps, and connectors that work together to turn your unrelated sources of data into coherent, visually immersive, and interactive insights. Your data may be an Excel spreadsheet, a collection of cloud-based and on-premises hybrid data warehouses, or a data store of some other type. This connector lets you stream PowerBI audit logs into Microsoft Sentinel, allowing you to track user activities in your PowerBI environment. You can filter the audit data by date range, user, dashboard, report, dataset, and activity type. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. + +**Custom Permissions:** +- **License**: Microsoft Power BI eligible license is required. 
+ +**Tenant Permissions:** +Requires GlobalAdmin, SecurityAdmin on the workspace's tenant + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Microsoft PowerBI audit logs to Microsoft Sentinel** + +This connector uses the Office Management API to get your PowerBI audit logs. The logs will be stored and processed in your existing Microsoft Sentinel workspace. You can find the data in the **PowerBIActivity** table. +- Connect Microsoft PowerBI + | | | |--------------------------|---| | **Tables Ingested** | `PowerBIActivity` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-project.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-project.md index e7ae2e30b65..b8441fb4b46 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-project.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-project.md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Microsoft Project](../connectors/office365project.md) - -**Publisher:** Microsoft - -Microsoft Project (MSP) is a project management software solution. Depending on your plan, Microsoft Project lets you plan projects, assign tasks, manage resources, create reports and more. This connector allows you to stream your Azure Project audit logs into Microsoft Sentinel in order to track your project activities. 
- -| | | -|--------------------------|---| -| **Tables Ingested** | `ProjectActivity` | -| **Connector Definition Files** | [template_Office365Project.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Project/Data%20Connectors/template_Office365Project.JSON) | - -[→ View full connector details](../connectors/office365project.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `ProjectActivity` | [Microsoft Project](../connectors/office365project.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-purview-information-protection.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-purview-information-protection.md index ca718147ef6..eabcbb0348a 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-purview-information-protection.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-purview-information-protection.md @@ -23,6 +23,23 @@ Microsoft Purview Information Protection helps you discover, classify, protect, Integrate Microsoft Purview Information Protection logs with Microsoft Sentinel to view dashboards, create custom alerts and improve investigation. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223811&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions. 
+ +**Custom Permissions:** +- **License**: Enterprise Mobility + Security E5/A5 or Microsoft 365 E5/A5 or P2 + +**Tenant Permissions:** +Requires GlobalAdmin, SecurityAdmin on the workspace's tenant + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Microsoft Purview Information Protection audit logs to Microsoft Sentinel** + | | | |--------------------------|---| | **Tables Ingested** | `MicrosoftPurviewInformationProtection` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-purview.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-purview.md index f9658a27ea3..145d07136b1 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-purview.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-purview.md @@ -21,6 +21,30 @@ This solution provides **1 data connector(s)**. Connect to Microsoft Purview to enable data sensitivity enrichment of Microsoft Sentinel. Data classification and sensitivity label logs from Microsoft Purview scans can be ingested and visualized through workbooks, analytical rules, and more. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2224125&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Microsoft Purview account Owner or Contributor role to set up Diagnostic Settings. Microsoft Contributor role with write permissions to enable data connector, view workbook, and create analytic rules. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Microsoft Purview to Microsoft Sentinel** + +Within the Azure Portal, navigate to your Purview resource: + 1. In the search bar, search for **Purview accounts.** + 2. Select the specific account that you would like to be set up with Sentinel. + +Inside your Microsoft Purview resource: + 3. Select **Diagnostic Settings.** + 4. Select **+ Add diagnostic setting.** + 5. In the **Diagnostic setting** blade: + - Select the Log Category as **DataSensitivityLogEvent**. + - Select **Send to Log Analytics**. + - Choose the log destination workspace. This should be the same workspace that is used by **Microsoft Sentinel.** + - Click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `PurviewDataSensitivityLogs` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-sysmon-for-linux.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-sysmon-for-linux.md index 4d3d21ced1c..8b431152594 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-sysmon-for-linux.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoft-sysmon-for-linux.md @@ -23,6 +23,53 @@ This solution provides **1 data connector(s)**. [Sysmon for linux link:]. The Sysmon for Linux connector uses [Syslog](https://aka.ms/sysLogInfo) as its data ingestion method. This solution depends on ASIM to work as expected. [Deploy ASIM](https://aka.ms/DeployASIM) to get the full value from the solution. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>This data connector depends on ASIM parsers based on a Kusto Functions to work as expected. [Deploy the parsers](https://aka.ms/ASimSysmonForLinuxARM) + + The following functions will be deployed: + + - vimFileEventLinuxSysmonFileCreated, vimFileEventLinuxSysmonFileDeleted + + - vimProcessCreateLinuxSysmon, vimProcessTerminateLinuxSysmon + + - vimNetworkSessionLinuxSysmon + +[Read more](https://aka.ms/AboutASIM) + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. +- **Open Syslog settings** + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoftdefenderforendpoint.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoftdefenderforendpoint.md index 5f19358f4e2..a1861ce2dbe 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoftdefenderforendpoint.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoftdefenderforendpoint.md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. 
+**This solution does not include data connectors.** -### [Microsoft Defender for Endpoint](../connectors/microsoftdefenderadvancedthreatprotection.md) - -**Publisher:** Microsoft - -Microsoft Defender for Endpoint is a security platform designed to prevent, detect, investigate, and respond to advanced threats. The platform creates alerts when suspicious security events are seen in an organization. Fetch alerts generated in Microsoft Defender for Endpoint to Microsoft Sentinel so that you can effectively analyze security events. You can create rules, build dashboards and author playbooks for immediate response. For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2220128&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -| | | -|--------------------------|---| -| **Tables Ingested** | `SecurityAlert` | -| **Connector Definition Files** | [template_MicrosoftDefenderAdvancedThreatProtection.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftDefenderForEndpoint/Data%20Connectors/template_MicrosoftDefenderAdvancedThreatProtection.JSON) | - -[→ View full connector details](../connectors/microsoftdefenderadvancedthreatprotection.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `SecurityAlert` | [Microsoft Defender for Endpoint](../connectors/microsoftdefenderadvancedthreatprotection.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. 
[← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/microsoftpurviewinsiderriskmanagement.md b/Tools/Solutions Analyzer/connector-docs/solutions/microsoftpurviewinsiderriskmanagement.md index 77ab7a8ad67..2237dbf33bc 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/microsoftpurviewinsiderriskmanagement.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/microsoftpurviewinsiderriskmanagement.md @@ -13,47 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Microsoft 365 Insider Risk Management](../connectors/officeirm.md) - -**Publisher:** Microsoft - -Microsoft 365 Insider Risk Management is a compliance solution in Microsoft 365 that helps minimize internal risks by enabling you to detect, investigate, and act on malicious and inadvertent activities in your organization. Risk analysts in your organization can quickly take appropriate actions to make sure users are compliant with your organization's compliance standards. - - - -Insider risk policies allow you to: - - - -- define the types of risks you want to identify and detect in your organization. - -- decide on what actions to take in response, including escalating cases to Microsoft Advanced eDiscovery if needed. - - - -This solution produces alerts that can be seen by Office customers in the Insider Risk Management solution in Microsoft 365 Compliance Center. - -[Learn More](https://aka.ms/OfficeIRMConnector) about Insider Risk Management. - - - -These alerts can be imported into Microsoft Sentinel with this connector, allowing you to see, investigate, and respond to them in a broader organizational threat context. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223721&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
- -| | | -|--------------------------|---| -| **Tables Ingested** | `SecurityAlert` | -| **Connector Definition Files** | [template_OfficeIRM.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftPurviewInsiderRiskManagement/Data%20Connectors/template_OfficeIRM.JSON) | - -[→ View full connector details](../connectors/officeirm.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `SecurityAlert` | [Microsoft 365 Insider Risk Management](../connectors/officeirm.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/mimecast.md b/Tools/Solutions Analyzer/connector-docs/solutions/mimecast.md index 00587f84b1e..45f08edc445 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/mimecast.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/mimecast.md @@ -48,6 +48,118 @@ The Mimecast products included within the connector are: +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **REST API Credentials/permissions**: See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Resource group** + +You need to have a resource group created with a subscription you are going to use. + +**2. Functions app** + +You need to have an Azure App registered for this connector to use +1. Application Id +2. Tenant Id +3. Client Id +4. Client Secret + +>**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. 
You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID are required as configuration parameters for the execution of Mimecast Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 2 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as a configuration parameter for the execution of Mimecast Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 3 - Get Object ID of your application in Microsoft Entra ID** + + After creating your app registration, follow the steps in this section to get Object ID: + 1. Go to **Microsoft Entra ID**. + 2. Select **Enterprise applications** from the left menu. + 3. Find your newly created application in the list (you can search by the name you provided). + 4. Click on the application. + 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment. 
+ +**STEP 4 - Deploy Mimecast API Connector** + +>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available. + +**7. Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Mimecast Targeted Threat Protection Data connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Region**. +3. Enter the below information : + + a. Location - The location in which the data collection rules and data collection endpoints should be deployed + + b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace + + c. AzureClientID - Enter Azure Client ID that you have created during app registration + + d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret + + e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory + + f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App + + g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com) + + h. MimecastClientID - Enter Mimecast Client ID for authentication + + i. MimecastClientSecret - Enter Mimecast Client Secret for authentication + + j. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted + + k. MimecastTTPAttachmentTableName - Enter name of the table used to store TTP Attachment data. Default is 'Ttp_Attachment' + + l. MimecastTTPImpersonationTableName - Enter name of the table used to store TTP Impersonation data. 
Default is 'Ttp_Impersonation' + + m. MimecastTTPUrlTableName - Enter name of the table used to store TTP URL data. Default is 'Ttp_Url' + + n. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes + + o. LogLevel - Please add log level or log severity value. By default it is set to INFO + + p. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + | | | |--------------------------|---| | **Tables Ingested** | `Ttp_Attachment_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/mimecastaudit.md b/Tools/Solutions Analyzer/connector-docs/solutions/mimecastaudit.md index 2ff156a58ac..14bb0ada3d7 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/mimecastaudit.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/mimecastaudit.md @@ -28,6 +28,85 @@ Audit & Authentication +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Mimecast API credentials**: You need to have the following pieces of information to configure the integration: +- mimecastEmail: Email address of a dedicated Mimecast admin user +- mimecastPassword: Password for the dedicated Mimecast admin user +- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast +- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast +- mimecastAccessKey: Access Key for the dedicated Mimecast admin user +- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user +- mimecastBaseURL: Mimecast Regional API Base URL + +> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations. + +> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/ +- **Resource group**: You need to have a resource group created with a subscription you are going to use. +- **Functions app**: You need to have an Azure App registered for this connector to use +1. Application Id +2. Tenant Id +3. Client Id +4. Client Secret + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. 
+ +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**1. Configuration:** + +**STEP 1 - Configuration steps for the Mimecast API** + +Go to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later) + +**STEP 2 - Deploy Mimecast API Connector** + +>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Deploy the Mimecast Audit & Authentication Data Connector:** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAudit-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. 
Enter the following fields: + - appName: Unique string that will be used as id for the app in Azure platform + - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID + - appInsightsLocation(default): westeurope + - mimecastEmail: Email address of dedicated user for this integration + - mimecastPassword: Password for dedicated user + - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast + - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast + - mimecastAccessKey: Access Key for the dedicated Mimecast user + - mimecastSecretKey: Secret Key for dedicated Mimecast user + - mimecastBaseURL: Regional Mimecast API Base URL + - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID + - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret] + - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) + - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) + - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID + + >Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. + +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> Audit checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt and select it for upload (this is done so that date_range for SIEM logs is stored in consistent state) + | | | |--------------------------|---| | **Tables Ingested** | `MimecastAudit_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/mimecastseg.md b/Tools/Solutions Analyzer/connector-docs/solutions/mimecastseg.md index 26085fed767..a846c2a04aa 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/mimecastseg.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/mimecastseg.md @@ -28,6 +28,85 @@ The data connector for [Mimecast Secure Email Gateway](https://integrations.mime +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **Mimecast API credentials**: You need to have the following pieces of information to configure the integration: +- mimecastEmail: Email address of a dedicated Mimecast admin user +- mimecastPassword: Password for the dedicated Mimecast admin user +- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast +- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast +- mimecastAccessKey: Access Key for the dedicated Mimecast admin user +- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user +- mimecastBaseURL: Mimecast Regional API Base URL + +> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations. + +> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/ +- **Resource group**: You need to have a resource group created with a subscription you are going to use. +- **Functions app**: You need to have an Azure App registered for this connector to use +1. Application Id +2. Tenant Id +3. Client Id +4. Client Secret + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**1. Configuration:** + +**STEP 1 - Configuration steps for the Mimecast API** + +Go to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later) + +**STEP 2 - Deploy Mimecast API Connector** + +>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Deploy the Mimecast Secure Email Gateway Data Connector:** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastSEG-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. 
Enter the following fields: + - appName: Unique string that will be used as id for the app in Azure platform + - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID + - appInsightsLocation(default): westeurope + - mimecastEmail: Email address of dedicated user for this integration + - mimecastPassword: Password for dedicated user + - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast + - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast + - mimecastAccessKey: Access Key for the dedicated Mimecast user + - mimecastSecretKey: Secret Key for dedicated Mimecast user + - mimecastBaseURL: Regional Mimecast API Base URL + - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID + - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret] + - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) + - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) + - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID + + >Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. + +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> SIEM checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt, dlp-checkpoint.txt and select it for upload (this is done so that date_range for SIEM logs is stored in consistent state) + | | | |--------------------------|---| | **Tables Ingested** | `MimecastDLP_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/mimecasttiregional.md b/Tools/Solutions Analyzer/connector-docs/solutions/mimecasttiregional.md index 5749f18afc4..eb9a43c9489 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/mimecasttiregional.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/mimecasttiregional.md @@ -30,6 +30,89 @@ Mimecast products and features required: +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **Mimecast API credentials**: You need to have the following pieces of information to configure the integration: +- mimecastEmail: Email address of a dedicated Mimecast admin user +- mimecastPassword: Password for the dedicated Mimecast admin user +- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast +- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast +- mimecastAccessKey: Access Key for the dedicated Mimecast admin user +- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user +- mimecastBaseURL: Mimecast Regional API Base URL + +> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations. + +> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/ +- **Resource group**: You need to have a resource group created with a subscription you are going to use. +- **Functions app**: You need to have an Azure App registered for this connector to use +1. Application Id +2. Tenant Id +3. Client Id +4. Client Secret + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**1. Configuration:** + +**STEP 1 - Configuration steps for the Mimecast API** + +Go to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later) + +**STEP 2 - Deploy Mimecast API Connector** + +>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Enable Mimecast Intelligence for Microsoft - Microsoft Sentinel Connector:** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTIRegional-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. 
Enter the following fields: + - appName: Unique string that will be used as id for the app in Azure platform + - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID + - appInsightsLocation(default): westeurope + - mimecastEmail: Email address of dedicated user for this integration + - mimecastPassword: Password for dedicated user + - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast + - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast + - mimecastAccessKey: Access Key for the dedicated Mimecast user + - mimecastSecretKey: Secret Key for dedicated Mimecast user + - mimecastBaseURL: Regional Mimecast API Base URL + - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID + - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret] + - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) + - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) + - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID + + >Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. + +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TIR checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt and select it for upload (this is done so that date_range for TIR logs is stored in consistent state) + +**4. Additional configuration:** + +>Connect to a **Threat Intelligence Platforms** Data Connector. Follow instructions on the connector page and then click connect button. + | | | |--------------------------|---| | **Tables Ingested** | `Event` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/mimecastttp.md b/Tools/Solutions Analyzer/connector-docs/solutions/mimecastttp.md index 076854451e9..32da4a182a2 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/mimecastttp.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/mimecastttp.md @@ -32,6 +32,91 @@ The Mimecast products included within the connector are: +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **REST API Credentials/permissions**: You need to have the following pieces of information to configure the integration: +- mimecastEmail: Email address of a dedicated Mimecast admin user +- mimecastPassword: Password for the dedicated Mimecast admin user +- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast +- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast +- mimecastAccessKey: Access Key for the dedicated Mimecast admin user +- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user +- mimecastBaseURL: Mimecast Regional API Base URL + +> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations. + +> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/ + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Resource group** + +You need to have a resource group created with a subscription you are going to use. + +**2. Functions app** + +You need to have an Azure App registered for this connector to use +1. Application Id +2. Tenant Id +3. Client Id +4. Client Secret + +>**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**3. Configuration:** + +**STEP 1 - Configuration steps for the Mimecast API** + +Go to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later) + +**STEP 2 - Deploy Mimecast API Connector** + +>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**5. Deploy the Mimecast Targeted Threat Protection Data Connector:** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTP-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. 
Enter the following fields: + - appName: Unique string that will be used as id for the app in Azure platform + - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID + - appInsightsLocation(default): westeurope + - mimecastEmail: Email address of dedicated user for this integration + - mimecastPassword: Password for dedicated user + - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast + - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast + - mimecastAccessKey: Access Key for the dedicated Mimecast user + - mimecastSecretKey: Secret Key for dedicated Mimecast user + - mimecastBaseURL: Regional Mimecast API Base URL + - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID + - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret] + - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) + - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) + - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID + + >Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. + +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TTP checkpoints ---> Upload*** and create empty files on your machine named attachment-checkpoint.txt, impersonation-checkpoint.txt, url-checkpoint.txt and select them for upload (this is done so that date_range for TTP logs are stored in consistent state) + | | | |--------------------------|---| | **Tables Ingested** | `MimecastTTPAttachment_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/misp2sentinel.md b/Tools/Solutions Analyzer/connector-docs/solutions/misp2sentinel.md index 17b2cfaa2e7..b759cc84d66 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/misp2sentinel.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/misp2sentinel.md @@ -22,6 +22,21 @@ This solution provides **1 data connector(s)**. This solution installs the MISP2Sentinel connector that allows you to automatically push threat indicators from MISP to Microsoft Sentinel via the Upload Indicators REST API. After installing the solution, configure and enable this data connector by following guidance in Manage solution view. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Installation and setup instructions** + +Use the documentation from this GitHub repository to install and configure the MISP to Microsoft Sentinel connector: + +https://github.com/cudeso/misp2sentinel + | | | |--------------------------|---| | **Tables Ingested** | `ThreatIntelligenceIndicator` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/mongodbatlas.md b/Tools/Solutions Analyzer/connector-docs/solutions/mongodbatlas.md index 4519c42fc4d..2c38f0d72f4 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/mongodbatlas.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/mongodbatlas.md @@ -21,6 +21,52 @@ This solution provides **1 data connector(s)**. The [MongoDBAtlas](https://www.mongodb.com/products/platform/atlas-database) Logs connector gives the capability to upload MongoDB Atlas database logs into Microsoft Sentinel through the MongoDB Atlas Administration API. Refer to the [API documentation](https://www.mongodb.com/docs/api/doc/atlas-admin-api-v2/) for more information. The connector provides the ability to get a range of database log messages for the specified hosts and specified project. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: MongoDB Atlas service account **Client ID** and **Client Secret** are required. 
[See the documentation to learn more about creating a service account](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-an-organization) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to 'MongoDB Atlas' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>Ensure the workspace is added to Microsoft Sentinel before deploying the connector. + +**1. STEP 1 - Configuration steps for the 'MongoDB Atlas Administration API'** + +1. [Follow these instructions](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-an-organization) to create a MongoDB Atlas service account. +2. Copy the **Client ID** and **Client Secret** you created, also the **Group ID** (Project) and each **Cluster ID** (Hostname) required for later steps. +3. Refer [MongoDB Atlas API documentation](https://www.mongodb.com/docs/api/doc/atlas-admin-api-v2/operation/operation-downloadgroupclusterlog) for more details. +4. The client secret can be passed into the connector via an Azure key vault or directly into the connector. +5. 
If you want to use the key vault option create a key vault, using a Vault Access Policy, with a secret named **mongodb-client-secret** and your client secret saved as the secret value. + +**2. STEP 2 - Deploy the 'MongoDB Atlas Logs' connector and the associated Azure Function** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#view/Microsoft_Azure_CreateUIDef/CustomDeploymentBlade/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FMongoDBAtlas%2FData%20Connectors%2FMongoDBAtlasLogs%2Fazuredeploy_Connector_MongoDBAtlasLogs_AzureFunction.json/uiFormDefinitionUri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FMongoDBAtlas%2FData%20Connectors%2FMongoDBAtlasLogs%2FcreateUiDef.json) + +**3. STEP 3 - Set the connector parameters** + +1. Select the preferred **Subscription** and an existing **Resource Group**. +2. Enter an existing **Log Analytics Workspace Resource ID** belonging to the resource group. +3. Click **Next** +4. Enter the **MongoDB Group ID**, a list of up to 10 **MongoDB Cluster IDs**, each on a separate line, and **MongoDB Client ID**. +5. Choose for **Authentication Method** either **Client Secret** and copy in your client secret value or **Key Vault** and copy in the name of your key vault. +Click **Next** +6. Review the MongoDB filters. Select logs from at least one category. Click **Next** +7. Review the schedule. Click **Next** +8. Review the settings then click **Create**. 
+ | | | |--------------------------|---| | **Tables Ingested** | `MDBALogTable_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/mongodbaudit.md b/Tools/Solutions Analyzer/connector-docs/solutions/mongodbaudit.md index 41446cf4c98..39ce6396e2c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/mongodbaudit.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/mongodbaudit.md @@ -21,6 +21,74 @@ This solution provides **1 data connector(s)**. MongoDB data connector provides the capability to ingest [MongoDBAudit](https://www.mongodb.com/) into Microsoft Sentinel. Refer to [MongoDB documentation](https://www.mongodb.com/docs/manual/tutorial/getting-started/) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias MongoDBAudit and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAudit/Parsers/MongoDBAudit.txt) on the second line of the query, enter the hostname(s) of your MongoDBAudit device(s) and any other unique identifiers for the logstream. 
The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Tomcat Server where the logs are generated. + +> Logs from MongoDB Enterprise Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure MongoDBAudit to write logs to files** + +Edit mongod.conf file (for Linux) or mongod.cfg (for Windows) to write logs to files: + +>**dbPath**: data/db + +>**path**: data/db/auditLog.json + +Set the following parameters: **dbPath** and **path**. Refer to the [MongoDB documentation for more details](https://www.mongodb.com/docs/manual/tutorial/configure-auditing/) + +**3. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. From the left pane, select **Settings**, select **Custom Logs** and click **+Add custom log** +3. Click **Browse** to upload a sample of a MongoDBAudit log file. Then, click **Next >** +4. 
Select **Timestamp** as the record delimiter and click **Next >** +5. Select **Windows** or **Linux** and enter the path to MongoDBAudit logs based on your configuration +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **MongoDBAudit** as the custom log Name (the '_CL' suffix will be added automatically) and click **Done**. + +**4. Validate connectivity** + +It may take upwards of 20 minutes until your logs start to appear in Microsoft Sentinel. + | | | |--------------------------|---| | **Tables Ingested** | `MongoDBAudit_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/morphisec.md b/Tools/Solutions Analyzer/connector-docs/solutions/morphisec.md index 18a2a1e999d..699a0b85b25 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/morphisec.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/morphisec.md @@ -25,6 +25,25 @@ This solution provides more than just data ingestion; it equips your security te With this solution, you can empower your SOC to leverage Morphisec's powerful threat prevention within a unified investigation and response workflow in Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure Morphisec Connector** + +1. Create an API key client in Morphisec Console with read permissions to fetch alerts. +2. Provide the Client ID and Client Secret in the connector configuration. 
+- **Morphisec Base URL**: https://.morphisec.cloud +- **Client ID**: Enter the Client ID +- **Client Secret**: (password field) +- **Tenant ID**: Enter your Morphisec Tenant ID +- Click 'Connect to Morphisec' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `MorphisecAlerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/mulesoft.md b/Tools/Solutions Analyzer/connector-docs/solutions/mulesoft.md index dce6441949a..7845fd14011 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/mulesoft.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/mulesoft.md @@ -21,6 +21,102 @@ This solution provides **1 data connector(s)**. The [MuleSoft Cloudhub](https://www.mulesoft.com/platform/saas/cloudhub-ipaas-cloud-based-integration) data connector provides the capability to retrieve logs from Cloudhub applications using the Cloudhub API and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **MuleSoftEnvId**, **MuleSoftAppName**, **MuleSoftUsername** and **MuleSoftPassword** are required for making API calls. 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**MuleSoftCloudhub**](https://aka.ms/sentinel-MuleSoftCloudhub-parser) which is deployed with the Microsoft Sentinel Solution. + +**Note: This data connector fetch only the logs of the CloudHub application using Platform API and not of CloudHub 2.0 application** + +**STEP 1 - Configuration steps for the MuleSoft Cloudhub API** + + Follow the instructions to obtain the credentials. + +1. Obtain the **MuleSoftEnvId**, **MuleSoftAppName**, **MuleSoftUsername** and **MuleSoftPassword** using the [documentation](https://help.mulesoft.com/s/article/How-to-get-Cloudhub-application-information-using-Anypoint-Platform-API). +2. Save credentials for using in the data connector. 
+ +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the MuleSoft Cloudhub data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the MuleSoft Cloudhub data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MuleSoftCloudhubAPI-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **MuleSoftEnvId**, **MuleSoftAppName**, **MuleSoftUsername** and **MuleSoftPassword** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the MuleSoft Cloudhub data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-MuleSoftCloudhubAPI-functionapp) file. 
Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. MuleSoftXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + MuleSoftEnvId + MuleSoftAppName + MuleSoftUsername + MuleSoftPassword + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud.
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `MuleSoft_Cloudhub_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/nasuni.md b/Tools/Solutions Analyzer/connector-docs/solutions/nasuni.md index 8abdc41c777..b6373f658bd 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/nasuni.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/nasuni.md @@ -22,6 +22,46 @@ This solution provides **1 data connector(s)**. The [Nasuni](https://www.nasuni.com/) connector allows you to easily connect your Nasuni Edge Appliance Notifications and file system audit logs with Microsoft Sentinel. This gives you more insight into activity within your Nasuni infrastructure and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. 
Configure the logs to be collected** + +Follow the configuration steps below to configure your Linux machine to send Nasuni event information to Microsoft Sentinel. Refer to the [Azure Monitor Agent documentation](https://learn.microsoft.com/en-us/azure/azure-monitor/agents/agents-overview) for additional details on these steps. +Configure the facilities you want to collect and their severities. +1. Select the link below to open your workspace agents configuration, and select the Syslog tab. +2. Select Add facility and choose from the drop-down list of facilities. Repeat for all the facilities you want to add. +3. Mark the check boxes for the desired severities for each facility. +4. Click Apply. +- **Open Syslog settings** + +**3. Configure Nasuni Edge Appliance settings** + +Follow the instructions in the [Nasuni Management Console Guide](https://view.highspot.com/viewer/629a633ae5b4caaf17018daa?iid=5e6fbfcbc7143309f69fcfcf) to configure Nasuni Edge Appliances to forward syslog events. Use the IP address or hostname of the Linux device running the Azure Monitor Agent in the Servers configuration field for the syslog settings. + | | | |--------------------------|---| | **Tables Ingested** | `Nasuni` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/netclean-proactive.md b/Tools/Solutions Analyzer/connector-docs/solutions/netclean-proactive.md index aa3962d195a..c7fa8bfa205 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/netclean-proactive.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/netclean-proactive.md @@ -21,6 +21,41 @@ This solution provides **1 data connector(s)**. This connector uses the Netclean Webhook (required) and Logic Apps to push data into Microsoft Sentinel Log Analytics +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required.
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** NetClean ProActive uses a Webhook to expose incident data, Azure Logic Apps is used to receive and push data to Log Analytics. This might result in additional data ingestion costs. + It's possible to test this without Logic Apps or NetClean Proactive; see option 2 +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**1. Option 1: Logic app** + +1. Create a new logic app + Use When an HTTP request is received as the Trigger and save it. It will now have generated a URL that can be used in the ProActive webconsole configuration. + Add an action: + Select the Azure Log Analytics Data Collector and choose Send Data + Enter Connection Name, Workspace ID and Workspace Key, you will find the information needed in your Log Analytics workspace under Settings-->Agents-->Log Analytics agent instructions. + In JSON Request body add @triggerBody(). in Custom Log Name add Netclean_Incidents. + +**2. Option 2 (Testing only)** + +Ingest data using an API function.
please use the script found on + https://learn.microsoft.com/en-us/azure/azure-monitor/logs/data-collector-api?tabs=powershell +Replace the CustomerId and SharedKey values with your values +Replace the content in the $json variable with the sample data found here: https://github.com/Azure/Azure-Sentinel/blob/master/Sample%20Data/Custom/Netclean_Incidents_CL.json . +Set the LogType variable to **Netclean_Incidents_CL** +Run the script + | | | |--------------------------|---| | **Tables Ingested** | `Netclean_Incidents_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/netskope.md b/Tools/Solutions Analyzer/connector-docs/solutions/netskope.md index 661cbf9e303..48e0fc9ca63 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/netskope.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/netskope.md @@ -21,6 +21,95 @@ This solution provides **1 data connector(s)**. The [Netskope Cloud Security Platform](https://www.netskope.com/platform) connector provides the capability to ingest Netskope logs and events into Microsoft Sentinel. The connector provides visibility into Netskope Platform Events and Alerts in Microsoft Sentinel to improve monitoring and investigation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Netskope API Token**: A Netskope API Token is required.
[See the documentation to learn more about Netskope API](https://innovatechcloud.goskope.com/docs/Netskope_Help/en/rest-api-v1-overview.html). **Note:** A Netskope account is required + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to Netskope to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Netskope and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskope/Parsers/Netskope.txt), on the second line of the query, enter the hostname(s) of your Netskope device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Netskope API** + + [Follow these instructions](https://docs.netskope.com/en/rest-api-v1-overview.html) provided by Netskope to obtain an API Token. 
**Note:** A Netskope account is required + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Netskope connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Netskope API Authorization Token, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +This method provides an automated deployment of the Netskope connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-netskope-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **API Key**, and **URI**. + - Use the following schema for the `uri` value: `https://.goskope.com` Replace `` with your domain. + - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. + - The default **Log Types** is set to pull all 6 available log types (`alert, page, application, audit, infrastructure, network`), remove any that are not required. + - Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values.
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. +6. After successfully deploying the connector, download the Kusto Function to normalize the data fields. [Follow the steps](https://aka.ms/sentinelgithubparsersnetskope) to use the Kusto function alias, **Netskope**. + +**4. Option 2 - Manual Deployment of Azure Functions** + +This method provides the step-by-step instructions to deploy the Netskope connector manually with Azure Function. + +**1. Create a Function App** + +1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**. +2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. +3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected. +4. Make other preferable configuration changes, if needed, then click **Create**. + +**2. Import Function App Code** + +1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**. +2. Select **Timer Trigger**. +3. Enter a unique Function **Name** and modify the cron schedule, if needed. The default value is set to run the Function App every 5 minutes. (Note: the Timer trigger should match the `timeInterval` value below to prevent overlapping data), click **Create**. +4. Click on **Code + Test** on the left pane. +5. Copy the [Function App Code](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Netskope/Data%20Connectors/Netskope/AzureFunctionNetskope/run.ps1) and paste into the Function App `run.ps1` editor. +6. Click **Save**. + +**3. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2.
In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following seven (7) application settings individually, with their respective string values (case-sensitive): + apikey + workspaceID + workspaceKey + uri + timeInterval + logTypes + logAnalyticsUri (optional) +> - Enter the URI that corresponds to your region. The `uri` value must follow the following schema: `https://.goskope.com` - There is no need to add subsequent parameters to the Uri, the Function App will dynamically append the parameters in the proper format. +> - Set the `timeInterval` (in minutes) to the default value of `5` to correspond to the default Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion. +> - Set the `logTypes` to `alert, page, application, audit, infrastructure, network` - This list represents all the available log types. Select the log types based on logging requirements, separating each by a single comma. +> - Note: If using Azure Key Vault, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. +5. After successfully deploying the connector, download the Kusto Function to normalize the data fields. [Follow the steps](https://aka.ms/sentinelgithubparsersnetskope) to use the Kusto function alias, **Netskope**.
+ | | | |--------------------------|---| | **Tables Ingested** | `Netskope_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/netskopev2.md b/Tools/Solutions Analyzer/connector-docs/solutions/netskopev2.md index 69fae128add..94669aa69b6 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/netskopev2.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/netskopev2.md @@ -42,6 +42,105 @@ The [Netskope Web Transactions](https://docs.netskope.com/en/netskope-help/data- +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group. +- **Microsoft.Compute permissions**: Read and write permissions to Azure VMs is required. [See the documentation to learn more about Azure VMs](https://learn.microsoft.com/azure/virtual-machines/overview). +- **TransactionEvents Credentials and Permissions**: **Netskope Tenant** and **Netskope API Token** is required. [See the documentation to learn more about Transaction Events.](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/) +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector provides the functionality of ingesting Netskope Web Transactions data using a docker image to be deployed on a virtual machine (Either Azure VM/On Premise VM). Check the [Azure VM pricing page](https://azure.microsoft.com/pricing/details/virtual-machines/linux) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Steps to create/get Credentials for the Netskope account** + + Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**: + 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar. + 2. Click on Tools and then **REST API v2** + 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from. + 4. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage. + +**STEP 2 - Choose one from the following two deployment options to deploy the docker based data connector to ingest Netskope Web Transactions data** + +>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Netskope API Authorization Key(s) [Make sure the token has permissions for transaction events].
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Using Azure Resource Manager (ARM) Template to deploy VM [Recommended]** + +Using the ARM template deploy an Azure VM, install the prerequisites and start execution. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2WebTransactions-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Docker Image Name (mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions) + Netskope HostName + Netskope API Token + Seek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) + Workspace ID + Workspace Key + Backoff Retry Count (The retry count for token related errors before restarting the execution.) + Backoff Sleep Time (Number of seconds to sleep before retrying) + Idle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution) + VM Name + Authentication Type + Admin Password or Key + DNS Label Prefix + Ubuntu OS Version + Location + VM Size + Subnet Name + Network Security Group Name + Security Type +4. Click on **Review+Create**. +5. Then after validation click on **Create** to deploy. + +**4. Option 2 - Manual Deployment on previously created virtual machine** + +Use the following step-by-step instructions to deploy the docker based data connector manually on a previously created virtual machine. + +**1. Install docker and pull docker Image** + +>**NOTE:** Make sure that the VM is linux based (preferably Ubuntu). + +1. 
Firstly you will need to [SSH into the virtual machine](https://learn.microsoft.com/azure/virtual-machines/linux-vm-connect?tabs=Linux). +2. Now install [docker engine](https://docs.docker.com/engine/install/). +3. Now pull the docker image from docker hub using the command: 'sudo docker pull mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'. +4. Now to run the docker image use the command: 'sudo docker run -it -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'. You can replace mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions with the image id. Here docker_persistent_volume is the name of the folder that would be created on the vm in which the files will get stored. + +**2. Configure the Parameters** + +1. Once the docker image is running it will ask for the required parameters. +2. Add each of the following application settings individually, with their respective values (case-sensitive): + Netskope HostName + Netskope API Token + Seek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) + Workspace ID + Workspace Key + Backoff Retry Count (The retry count for token related errors before restarting the execution.) + Backoff Sleep Time (Number of seconds to sleep before retrying) + Idle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution) +3. Now the execution has started but is in interactive mode, so that shell cannot be stopped. To run it as a background process, stop the current execution by pressing Ctrl+C and then use the command: 'sudo docker run -d -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'. + +**3. Stop the docker container** + +1. Use the command 'sudo docker container ps' to list the running docker containers. Note down your container id. +2. 
Now stop the container using the command: 'sudo docker stop *<*container-id*>*'. + | | | |--------------------------|---| | **Tables Ingested** | `NetskopeWebtxData_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/netwrix-auditor.md b/Tools/Solutions Analyzer/connector-docs/solutions/netwrix-auditor.md index 23d447c06a6..bd4e8c295d9 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/netwrix-auditor.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/netwrix-auditor.md @@ -25,6 +25,61 @@ This solution provides **2 data connector(s)**. Netwrix Auditor data connector provides the capability to ingest [Netwrix Auditor (formerly Stealthbits Privileged Activity Manager)](https://www.netwrix.com/auditor.html) events into Microsoft Sentinel. Refer to [Netwrix documentation](https://helpcenter.netwrix.com/) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on NetwrixAuditor parser based on a Kusto Function to work as expected. This parser is installed along with solution installation. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . 
+ +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Configure Netwrix Auditor to send logs using CEF** + + [Follow the instructions](https://www.netwrix.com/download/QuickStart/Netwrix_Auditor_Add-on_for_HPE_ArcSight_Quick_Start_Guide.pdf) to configure event export from Netwrix Auditor. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/nginx-http-server.md b/Tools/Solutions Analyzer/connector-docs/solutions/nginx-http-server.md index 25dd95af35d..3678ca6dc58 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/nginx-http-server.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/nginx-http-server.md @@ -21,6 +21,61 @@ This solution provides **1 data connector(s)**. The NGINX HTTP Server data connector provides the capability to ingest [NGINX](https://nginx.org/en/) HTTP Server events into Microsoft Sentinel. Refer to [NGINX Logs documentation](https://nginx.org/en/docs/http/ngx_http_log_module.html) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias NGINXHTTPServer and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NGINX%20HTTP%20Server/Parsers/NGINXHTTPServer.txt).The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the NGINX HTTP Server where the logs are generated. + +> Logs from NGINX HTTP Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. From the left pane, select **Data**, select **Custom Logs** and click **Add+** +3. Click **Browse** to upload a sample of a NGINX HTTP Server log file (e.g. access.log or error.log). Then, click **Next >** +4. Select **New line** as the record delimiter and click **Next >** +5. 
Select **Windows** or **Linux** and enter the path to NGINX HTTP logs based on your configuration. Example: + - **Linux** Directory: '/var/log/nginx/*.log' +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **NGINX_CL** as the custom log Name and click **Done** + | | | |--------------------------|---| | **Tables Ingested** | `NGINX_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/nonamesecurity.md b/Tools/Solutions Analyzer/connector-docs/solutions/nonamesecurity.md index bb240794d6e..ebd2f2c32f5 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/nonamesecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/nonamesecurity.md @@ -21,6 +21,24 @@ This solution provides **1 data connector(s)**. Noname Security solution to POST data into a Microsoft Sentinel SIEM workspace via the Azure Monitor REST API +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure the Noname Sentinel integration.** + +Configure the Sentinel workflow in the Noname integrations settings. 
Find documentation at https://docs.nonamesecurity.com +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `NonameAPISecurityAlert_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/nordpass.md b/Tools/Solutions Analyzer/connector-docs/solutions/nordpass.md index 1434ca7db5c..94c51f5f1dc 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/nordpass.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/nordpass.md @@ -21,6 +21,24 @@ This solution provides **1 data connector(s)**. Integrating NordPass with Microsoft Sentinel SIEM via the API will allow you to automatically transfer Activity Log data from NordPass to Microsoft Sentinel and get real-time insights, such as item activity, all login attempts, and security notifications. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +To proceed with the Microsoft Sentinel setup + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Nordpass-azuredeploy) +2. 
**Please note that after the successful deployment, the system pulls Activity Log data every 1 minute by default.** + | | | |--------------------------|---| | **Tables Ingested** | `NordPassEventLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/nozominetworks.md b/Tools/Solutions Analyzer/connector-docs/solutions/nozominetworks.md index 6bd4ab52234..69236e2fdd2 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/nozominetworks.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/nozominetworks.md @@ -25,6 +25,67 @@ This solution provides **2 data connector(s)**. The [Nozomi Networks](https://www.nozominetworks.com/) data connector provides the capability to ingest Nozomi Networks Events into Microsoft Sentinel. Refer to the Nozomi Networks [PDF documentation](https://www.nozominetworks.com/resources/data-sheets-brochures-learning-guides/) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**NozomiNetworksEvents**](https://aka.ms/sentinel-NozomiNetworks-parser) which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. 
Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Follow these steps to configure Nozomi Networks device for sending Alerts, Audit Logs, Health Logs log via syslog in CEF format: + +> 1. Log in to the Guardian console. + +> 2. Navigate to Administration->Data Integration, press +Add and select the Common Event Format (CEF) from the drop down + +> 3. Create New Endpoint using the appropriate host information and enable Alerts, Audit Logs, Health Logs for sending. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. 
You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/nxlog-bsm-macos.md b/Tools/Solutions Analyzer/connector-docs/solutions/nxlog-bsm-macos.md index 54b92171deb..e67c63f7309 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/nxlog-bsm-macos.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/nxlog-bsm-macos.md @@ -21,6 +21,22 @@ This solution provides **1 data connector(s)**. The [NXLog BSM](https://docs.nxlog.co/refman/current/im/bsm.html) macOS data connector uses Sun's Basic Security Module (BSM) Auditing API to read events directly from the kernel for capturing audit events on the macOS platform. This REST API connector can efficiently export macOS audit events to Microsoft Sentinel in real-time. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +Follow the step-by-step instructions in the *NXLog User Guide* Integration Topic [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `BSMmacOS_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/nxlog-fim.md b/Tools/Solutions Analyzer/connector-docs/solutions/nxlog-fim.md index fbefddb276c..ef6a8d45d9c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/nxlog-fim.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/nxlog-fim.md @@ -21,6 +21,22 @@ This solution provides **1 data connector(s)**. The [NXLog FIM](https://docs.nxlog.co/refman/current/im/fim.html) module allows for the scanning of files and directories, reporting detected additions, changes, renames and deletions on the designated paths through calculated checksums during successive scans. This REST API connector can efficiently export the configured FIM events to Microsoft Sentinel in real time. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +Follow the step-by-step instructions in the [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) integration chapter of the *NXLog User Guide* to configure this connector. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `NXLogFIM_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/nxlog-linuxaudit.md b/Tools/Solutions Analyzer/connector-docs/solutions/nxlog-linuxaudit.md index bb42a069740..66b247fd33f 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/nxlog-linuxaudit.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/nxlog-linuxaudit.md @@ -21,6 +21,22 @@ This solution provides **1 data connector(s)**. The [NXLog LinuxAudit](https://docs.nxlog.co/refman/current/im/linuxaudit.html) data connector supports custom audit rules and collects logs without auditd or any other user-space software. IP addresses and group/user IDs are resolved to their respective names making [Linux audit](https://docs.nxlog.co/userguide/integrate/linux-audit.html) logs more intelligible to security analysts. This REST API connector can efficiently export Linux security events to Microsoft Sentinel in real-time. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Follow the step-by-step instructions in the *NXLog User Guide* Integration Topic [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `LinuxAudit_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/nxlogaixaudit.md b/Tools/Solutions Analyzer/connector-docs/solutions/nxlogaixaudit.md index 516486100d8..5bad2a938cd 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/nxlogaixaudit.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/nxlogaixaudit.md @@ -21,6 +21,24 @@ This solution provides **1 data connector(s)**. The [NXLog AIX Audit](https://docs.nxlog.co/refman/current/im/aixaudit.html) data connector uses the AIX Audit subsystem to read events directly from the kernel for capturing audit events on the AIX platform. This REST API connector can efficiently export AIX Audit events to Microsoft Sentinel in real time. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**NXLog_parsed_AIX_Audit_view**](https://aka.ms/sentinel-nxlogaixaudit-parser) which is deployed with the Microsoft Sentinel Solution. + +Follow the step-by-step instructions in the *NXLog User Guide* Integration Guide [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `AIX_Audit_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/nxlogdnslogs.md b/Tools/Solutions Analyzer/connector-docs/solutions/nxlogdnslogs.md index 10bcbaf40b6..9aa8d8ccb3a 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/nxlogdnslogs.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/nxlogdnslogs.md @@ -21,6 +21,24 @@ This solution provides **1 data connector(s)**. The NXLog DNS Logs data connector uses Event Tracing for Windows ([ETW](https://docs.microsoft.com/windows/apps/trace-processing/overview)) for collecting both Audit and Analytical DNS Server events. The [NXLog *im_etw* module](https://docs.nxlog.co/refman/current/im/etw.html) reads event tracing data directly for maximum efficiency, without the need to capture the event trace into an .etl file. This REST API connector can forward DNS Server events to Microsoft Sentinel in real time. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on parsers based on Kusto functions deployed with the Microsoft Sentinel Solution to work as expected. The [**ASimDnsMicrosoftNXLog **](https://aka.ms/sentinel-nxlogdnslogs-parser) is designed to leverage Microsoft Sentinel's built-in DNS-related analytics capabilities. + +Follow the step-by-step instructions in the *NXLog User Guide* Integration Topic [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `NXLog_DNS_Server_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/obsidian-datasharing.md b/Tools/Solutions Analyzer/connector-docs/solutions/obsidian-datasharing.md index 249ee5e7185..dba52a0f1b8 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/obsidian-datasharing.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/obsidian-datasharing.md @@ -21,6 +21,43 @@ This solution provides **1 data connector(s)**. 
The Obsidian Datasharing connector provides the capability to read raw event data from Obsidian Datasharing in Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher. +- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Create ARM Resources and Provide the Required Permissions** + +This connector reads data from the tables that Obsidian Datasharing uses in a Microsoft Analytics Workspace, if the data forwarding option is enabled in Obsidian Datasharing then raw event data is sent to the Microsoft Sentinel Ingestion API. +#### Automated Configuration and Secure Data Ingestion with Entra Application +Clicking on "Deploy" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). +It will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token. +Deploy Obsidian Datasharing connector resources + +**2. Push your logs into the workspace** + +Use the following parameters to configure the your machine to send the logs to the workspace. 
+- **Tenant ID (Directory ID)**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Endpoint Uri**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Rule Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Activity Stream Name**: `Custom-ObsidianActivity_CL` +- **Threat Stream Name**: `Custom-ObsidianThreat_CL` + | | | |--------------------------|---| | **Tables Ingested** | `ObsidianActivity_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/okta-single-sign-on.md b/Tools/Solutions Analyzer/connector-docs/solutions/okta-single-sign-on.md index 448eb7ca547..5d14cf0d9d9 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/okta-single-sign-on.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/okta-single-sign-on.md @@ -29,6 +29,44 @@ This solution provides **3 data connector(s)**. The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) data connector provides the capability to ingest audit and event logs from the Okta Sysem Log API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform and uses the Okta System Log API to fetch the events. 
The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Okta API Token**: An Okta API token. Follow the [following instructions](https://developer.okta.com/docs/guides/create-an-api-token/main/) to create an See the [documentation](https://developer.okta.com/docs/reference/api/system-log/) to learn more about Okta System Log API. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +To enable the Okta Single Sign-On for Microsoft Sentinel, provide the required information below and click on Connect. +> +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Endpoint** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add domain** + +*Add domain* + +When you click the "Add domain" button in the portal, a configuration form will open. 
You'll need to provide: + +- **Okta Domain Name** (optional): Okta Domain Name (e.g., myDomain.okta.com) +- **API Key** (optional): API Key + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `OktaV2_CL` | @@ -46,7 +84,7 @@ This solution ingests data into **4 table(s)**: |-------|-------------------| | `OktaNativePoller_CL` | [Okta Single Sign-On (Polling CCP)](../connectors/oktasso-polling.md) | | `OktaV2_CL` | [Okta Single Sign-On](../connectors/oktassov2.md) | -| `Okta_CL` | [Okta Single Sign-On](../connectors/oktassov2.md), [Okta Single Sign-On](../connectors/oktasso.md) | +| `Okta_CL` | [Okta Single Sign-On](../connectors/oktasso.md), [Okta Single Sign-On](../connectors/oktassov2.md) | | `signIns` | [Okta Single Sign-On (Preview)](../connectors/oktassov2.md) | [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/onapsis-defend.md b/Tools/Solutions Analyzer/connector-docs/solutions/onapsis-defend.md index ae5ba9f498c..0a445dd4874 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/onapsis-defend.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/onapsis-defend.md @@ -22,6 +22,43 @@ This solution provides **1 data connector(s)**. Onapsis Defend Integration is aimed at forwarding alerts and logs collected and detected by Onapsis Platform into Microsoft Sentinel SIEM +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher. +- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rules. 
Typically requires Azure RBAC Owner or User Access Administrator role. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Create ARM Resources and Provide the Required Permissions** + +We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it. +#### Automated deployment of Azure resources +Clicking on "Deploy push connector resources" will trigger the creation of DCR and DCE resources. +It will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials. +- Deploy push connector resources + Application: Onapsis Defend Integration push to Microsoft Sentinel + +**2. Maintain the data collection endpoint details and authentication info in Onapsis Defend Integration** + +Share the data collection endpoint URL and authentication info with the Onapsis Defend Integration administrator to configure the Onapsis Defend Integration to send data to the data collection endpoint. 
+- **Use this value to configure as Tenant ID in the LogIngestionAPI credential.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra Application Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Use this value to configure the LogsIngestionURL parameter when deploying the IFlow.**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **DCR Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `Onapsis_Defend_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/onapsis-platform.md b/Tools/Solutions Analyzer/connector-docs/solutions/onapsis-platform.md index 9eac5b9a44d..7a46d54b1bc 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/onapsis-platform.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/onapsis-platform.md @@ -21,6 +21,67 @@ This solution provides **1 data connector(s)**. The Onapsis Connector allows you to export the alarms triggered in the Onapsis Platform into Microsoft Sentinel in real-time. This gives you the ability to monitor the activity on your SAP systems, identify incidents and respond to them quickly. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your Onapsis Console and Microsoft Sentinel. This machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Refer to the Onapsis in-product help to set up log forwarding to the Syslog agent. + +> 1. Go to Setup > Third-party integrations > Defend Alarms and follow the instructions for Microsoft Sentinel. + +> 2. 
Make sure your Onapsis Console can reach the proxy machine where the agent is installed - logs should be sent to port 514 using TCP. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Create Onapsis lookup function for incident enrichment** + +[Follow these steps to get this Kusto function](https://aka.ms/sentinel-Onapsis-parser) + +**5. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/oneidentity.md b/Tools/Solutions Analyzer/connector-docs/solutions/oneidentity.md index d4d9f8e4e2a..83c888e836a 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/oneidentity.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/oneidentity.md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [One Identity Safeguard](../connectors/oneidentity.md) - -**Publisher:** One Identity LLC. 
- -The One Identity Safeguard (CEF) Sentinel data connector enhances the standard Common Event Format (CEF) connector with Safeguard for Privileged Sessions-specific dashboards. Use this connector to easily start utilizing the events generated by your device for visualization, alerts, investigations and more. - -| | | -|--------------------------|---| -| **Tables Ingested** | `CommonSecurityLog` | -| **Connector Definition Files** | [OneIdentity.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneIdentity/Data%20Connectors/OneIdentity.JSON) | - -[→ View full connector details](../connectors/oneidentity.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `CommonSecurityLog` | [One Identity Safeguard](../connectors/oneidentity.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/oneloginiam.md b/Tools/Solutions Analyzer/connector-docs/solutions/oneloginiam.md index 23eafce2fb0..711a3dd4188 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/oneloginiam.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/oneloginiam.md @@ -25,6 +25,49 @@ This solution provides **2 data connector(s)**. The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through REST API by using OneLogin [Events API](https://developers.onelogin.com/api-docs/1/events/get-events) and OneLogin [Users API](https://developers.onelogin.com/api-docs/1/users/get-users). The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **OneLogin IAM API Credentials**: To create API Credentials follow the document link provided here, [Click Here](https://developers.onelogin.com/api-docs/1/getting-started/working-with-api-credentials). + Make sure to have an account type of either account owner or administrator to create the API credentials. + Once you create the API Credentials you get your Client ID and Client Secret. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect OneLogin IAM Platform to Microsoft Sentinel** + +To ingest data from OneLogin IAM to Microsoft Sentinel, you have to click on Add Domain button below then you get a pop up to fill the details, provide the required information and click on Connect. You can see the domain endpoints connected in the grid. +> +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Endpoint** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add domain** + +*Add domain* + +When you click the "Add domain" button in the portal, a configuration form will open. 
You'll need to provide: + +- **OneLogin Domain** (optional): Enter your Company's OneLogin Domain +- **Client ID** (optional): Enter your Client ID +- **Client Secret** (optional): Enter your Client Secret + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `OneLoginEventsV2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/onetrust.md b/Tools/Solutions Analyzer/connector-docs/solutions/onetrust.md index 8a880264c21..6d50127f949 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/onetrust.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/onetrust.md @@ -22,6 +22,42 @@ This solution provides **1 data connector(s)**. The OneTrust connector for Microsoft Sentinel provides the capability to have near real time visibility into where sensitive data has been located or remediated across across Google Cloud and other OneTrust supported data sources. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher. +- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Create ARM Resources and Provide the Required Permissions** + +This connector reads data from the tables that OneTrust uses in a Microsoft Analytics Workspace. 
If OneTrust's data forwarding option is enabled then raw event data can be sent to the Microsoft Sentinel Ingestion API. +#### Automated Configuration and Secure Data Ingestion with Entra Application +Clicking on "Deploy" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). +It will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token. +Deploy OneTrust connector resources + +**2. Push your logs into the workspace** + +Use the following parameters to configure the your machine to send the logs to the workspace. +- **Tenant ID (Directory ID)**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Endpoint Uri**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Rule Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **OneTrust Metadata Stream Name**: `Custom-OneTrustMetadataV3` + | | | |--------------------------|---| | **Tables Ingested** | `OneTrustMetadataV3_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/openvpn.md b/Tools/Solutions Analyzer/connector-docs/solutions/openvpn.md index 02592ca2bd1..adac60f55a8 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/openvpn.md +++ b/Tools/Solutions 
Analyzer/connector-docs/solutions/openvpn.md @@ -21,6 +21,59 @@ This solution provides **1 data connector(s)**. The [OpenVPN](https://github.com/OpenVPN) data connector provides the capability to ingest OpenVPN Server logs into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OpenVpnEvent**](https://aka.ms/sentinel-openvpn-parser) which is deployed with the Microsoft Sentinel Solution. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server where the OpenVPN are forwarded. + +> Logs from OpenVPN Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. 
Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. +- **Open Syslog settings** + +**3. Check your OpenVPN logs.** + +OpenVPN server logs are written into common syslog file (depending on the Linux distribution used: e.g. /var/log/messages) + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/oracle-cloud-infrastructure.md b/Tools/Solutions Analyzer/connector-docs/solutions/oracle-cloud-infrastructure.md index 12cb1aaf099..1291c92a5e6 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/oracle-cloud-infrastructure.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/oracle-cloud-infrastructure.md @@ -29,6 +29,100 @@ The Oracle Cloud Infrastructure (OCI) data connector provides the capability to

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **OCI API Credentials**: **API Key Configuration File** and **Private Key** are required for OCI API connection. See the documentation to learn more about [creating keys for API access](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/apisigningkey.htm) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector can go over the 500 column limit of log Analytics. When this happens some logs will be dropped. For this reason the connector can be unreliable depending on the logs that are being generated and collected. + +>**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OCILogs**](https://aka.ms/sentinel-OracleCloudInfrastructureLogsConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Creating Stream** + +1. Log in to OCI console and go to *navigation menu* -> *Analytics & AI* -> *Streaming* +2. Click *Create Stream* +3. Select Stream Pool or create a new one +4. Provide the *Stream Name*, *Retention*, *Number of Partitions*, *Total Write Rate*, *Total Read Rate* based on your data amount. +5. Go to *navigation menu* -> *Logging* -> *Service Connectors* +6. Click *Create Service Connector* +7. Provide *Connector Name*, *Description*, *Resource Compartment* +8. Select Source: Logging +9. Select Target: Streaming +10. (Optional) Configure *Log Group*, *Filters* or use custom search query to stream only logs that you need. +11. Configure Target - select the stream created before. +12. Click *Create* + +Check the documentation to get more information about [Streaming](https://docs.oracle.com/en-us/iaas/Content/Streaming/home.htm) and [Service Connectors](https://docs.oracle.com/en-us/iaas/Content/service-connector-hub/home.htm). + +**STEP 2 - Creating credentials for OCI REST API** + +Follow the documentation to [create Private Key and API Key Configuration File.](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/apisigningkey.htm) + +>**IMPORTANT:** Save Private Key and API Key Configuration File created during this step as they will be used during deployment step. 
+ +**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the OCI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as OCI API credentials, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the OCI data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OracleCloudInfrastructureLogsConnector-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**, **User**, **Key_content**, **Pass_phrase**, **Fingerprint**, **Tenancy**, **Region**, **Message Endpoint**, **Stream Ocid** +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the OCI data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the [Azure Function App](https://aka.ms/sentinel-OracleCloudInfrastructureLogsConnector-functionapp) file. Extract archive to your local development computer.. +2. 
Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + AzureSentinelWorkspaceId + AzureSentinelSharedKey + user + key_content + pass_phrase (Optional) + fingerprint + tenancy + region + Message Endpoint + StreamOcid + logAnalyticsUri (Optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. +5. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `OCI_Logs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/oracledatabaseaudit.md b/Tools/Solutions Analyzer/connector-docs/solutions/oracledatabaseaudit.md index 800562f51ba..df9ac034822 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/oracledatabaseaudit.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/oracledatabaseaudit.md @@ -21,6 +21,57 @@ This solution provides **1 data connector(s)**. The Oracle DB Audit data connector provides the capability to ingest [Oracle Database](https://www.oracle.com/database/technologies/) audit events into Microsoft Sentinel through the syslog. 
Refer to [documentation](https://docs.oracle.com/en/database/oracle/oracle-database/21/dbseg/introduction-to-auditing.html#GUID-94381464-53A3-421B-8F13-BD171C867405) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Oracle Database Audit and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleDatabaseAudit/Parsers/OracleDatabaseAuditEvent.txt). The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. 
Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. +- **Open Syslog settings** + +**3. Configure Oracle Database Audit events to be sent to Syslog** + +Follow the below instructions + + 1. Create the Oracle database [Follow these steps.](https://learn.microsoft.com/en-us/azure/virtual-machines/workloads/oracle/oracle-database-quick-create) + + 2. Login to Oracle database created from the above step [Follow these steps.](https://docs.oracle.com/cd/F49540_01/DOC/server.815/a67772/create.htm) + + 3. Enable unified logging over syslog by **Alter the system to enable unified logging** [Following these steps.](https://docs.oracle.com/en/database/oracle/oracle-database/21/refrn/UNIFIED_AUDIT_COMMON_SYSTEMLOG.html#GUID-9F26BC8E-1397-4B0E-8A08-3B12E4F9ED3A) + + 4. Create and **enable an Audit policy for unified auditing** [Follow these steps.](https://docs.oracle.com/en/database/oracle/oracle-database/19/sqlrf/CREATE-AUDIT-POLICY-Unified-Auditing.html#GUID-8D6961FB-2E50-46F5-81F7-9AEA314FC693) + + 5. **Enabling syslog and Event Viewer** Captures for the Unified Audit Trail [Follow these steps.](https://docs.oracle.com/en/database/oracle/oracle-database/18/dbseg/administering-the-audit-trail.html#GUID-3EFB75DB-AE1C-44E6-B46E-30E5702B0FC4) + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/oracleweblogicserver.md b/Tools/Solutions Analyzer/connector-docs/solutions/oracleweblogicserver.md index 96e22adbaa9..1f830924b2e 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/oracleweblogicserver.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/oracleweblogicserver.md @@ -21,6 +21,62 @@ This solution provides **1 data connector(s)**. 
OracleWebLogicServer data connector provides the capability to ingest [OracleWebLogicServer](https://docs.oracle.com/en/middleware/standalone/weblogic-server/index.html) events into Microsoft Sentinel. Refer to [OracleWebLogicServer documentation](https://docs.oracle.com/en/middleware/standalone/weblogic-server/14.1.1.0/index.html) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias OracleWebLogicServerEvent and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleWebLogicServer/Parsers/OracleWebLogicServerEvent.yaml). The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Oracle WebLogic Server where the logs are generated. + +> Logs from Oracle WebLogic Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. 
+ - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. From the left pane, select **Data**, select **Custom Logs** and click **Add+** +3. Click **Browse** to upload a sample of a OracleWebLogicServer log file (e.g. server.log). Then, click **Next >** +4. Select **New line** as the record delimiter and click **Next >** +5. Select **Windows** or **Linux** and enter the path to OracleWebLogicServer logs based on your configuration. Example: + - **Linux** Directory: 'DOMAIN_HOME/servers/server_name/logs/*.log' + - **Windows** Directory: 'DOMAIN_NAME\servers\SERVER_NAME\logs\*.log' +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. 
Add **OracleWebLogicServer_CL** as the custom log Name and click **Done** + | | | |--------------------------|---| | **Tables Ingested** | `OracleWebLogicServer_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/orca-security-alerts.md b/Tools/Solutions Analyzer/connector-docs/solutions/orca-security-alerts.md index 0cb01947652..e07d40c0159 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/orca-security-alerts.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/orca-security-alerts.md @@ -21,6 +21,22 @@ This solution provides **1 data connector(s)**. The Orca Security Alerts connector allows you to easily export Alerts logs to Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Follow [guidance](https://orcasecurity.zendesk.com/hc/en-us/articles/360043941992-Azure-Sentinel-configuration) for integrating Orca Security Alerts logs with Microsoft Sentinel. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `OrcaAlerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ossec.md b/Tools/Solutions Analyzer/connector-docs/solutions/ossec.md index ed02e20e77a..c092d2becdb 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ossec.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ossec.md @@ -25,6 +25,61 @@ This solution provides **2 data connector(s)**. OSSEC data connector provides the capability to ingest [OSSEC](https://www.ossec.net/) events into Microsoft Sentinel. Refer to [OSSEC documentation](https://www.ossec.net/docs) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias OSSEC and load the function code or click [here](https://aka.ms/sentinel-OSSECEvent-parser), on the second line of the query, enter the hostname(s) of your OSSEC device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + [Follow these steps](https://www.ossec.net/docs/docs/manual/output/syslog-output.html) to configure OSSEC sending alerts via syslog. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. 
You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto---xdr-(cortex).md b/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto---xdr-(cortex).md index 9e3baf64474..a0727ea983c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto---xdr-(cortex).md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto---xdr-(cortex).md @@ -18,6 +18,65 @@ This solution provides **1 data connector(s)**. The Palo Alto Networks Cortex XDR connector gives you an easy way to connect to your Cortex XDR logs with Microsoft Sentinel. This increases the visibility of your endpoint security. It will give you better ability to monitor your resources by creating custom Workbooks, analytics rules, Incident investigation, and evidence gathering. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Azure Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Azure Sentinel will use as the proxy between your security solution and Azure Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Azure Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Palo Alto Networks (Cortex) logs to Syslog agent** + +> 1. Go to [Cortex Settings and Configurations](https://inspira.xdr.in.paloaltonetworks.com/configuration/external-alerting) and Click to add New Server under External Applications. + +> 2. Then specify the name and Give public IP of your syslog server in Destination. + +> 3. Give Port number as 514 and from Facility field select FAC_SYSLOG from dropdown. + +> 4. Select Protocol as UDP and hit Create. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. 
+ +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto-cortex-xdr-ccp.md b/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto-cortex-xdr-ccp.md index 935ba7838d2..b0b5dc37bc7 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto-cortex-xdr-ccp.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto-cortex-xdr-ccp.md @@ -21,6 +21,34 @@ This solution provides **1 data connector(s)**. The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### Configuration steps for the Palo Alto Cortex XDR API + Follow the instructions to obtain the credentials. You can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key. +#### 1. Retrieve API URL + 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials + 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] + 1.3. Under [**Integrations**] click on [**API Keys**]. + 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner. +#### 2. Retrieve API Token + 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials + 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] + 2.3. Under [**Integrations**] click on [**API Keys**]. + 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner. + 2.5. Choose security level, role, choose Standard and click on [**Generate**] + 2.6. 
Copy the API Token, once it is generated the [**API Token ID**] can be found under the ID column +- **Base API URL**: https://api-example.xdr.au.paloaltonetworks.com +- **API Key ID**: API ID +- **API Token**: (password field) +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `PaloAltoCortexXDR_Alerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto-cortex-xpanse-ccf.md b/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto-cortex-xpanse-ccf.md index d74972e84cb..15ca611c28a 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto-cortex-xpanse-ccf.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto-cortex-xpanse-ccf.md @@ -21,6 +21,43 @@ This solution provides **1 data connector(s)**. The Palo Alto Cortex Xpanse data connector ingests alerts data into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Palo Alto Xpanse to Microsoft Sentinel** + +To ingest data from Palo Alto Cortex Xpanse to Microsoft Sentinel, click on **Add Domain**. Fill in the required details in the pop-up and click Connect. You will see connected domain endpoints in the grid below. To get the Auth ID and API Key, go to **Settings → Configuration → Integrations → API Keys** in the Cortex Xpanse portal and generate new credentials. +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. 
+ +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Endpoint** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add domain** + +*Add domain* + +When you click the "Add domain" button in the portal, a configuration form will open. You'll need to provide: + +- **Domain Name** (optional): e.g., example.crtx.us.paloaltonetworks.com +- **API Key** (optional): Enter your Palo Alto Xpanse API Key +- **Xpanse Auth ID** (optional): Enter your Xpanse Auth ID + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `CortexXpanseAlerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto-prisma-cloud-cwpp.md b/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto-prisma-cloud-cwpp.md index f374d5808bf..1922867873d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto-prisma-cloud-cwpp.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/palo-alto-prisma-cloud-cwpp.md @@ -25,6 +25,28 @@ This solution provides **2 data connector(s)**. The [Palo Alto Prisma Cloud CWPP](https://prisma.pan.dev/api/cloud/cwpp/audits/#operation/get-audits-incidents) data connector allows you to connect to your Prisma Cloud CWPP instance and ingesting alerts into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel’s Codeless Connector Platform and uses the Prisma Cloud API to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **PrismaCloudCompute API Key**: A Palo Alto Prisma Cloud CWPP Monitor API username and password is required. [See the documentation to learn more about PrismaCloudCompute SIEM API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/readme.md). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Palo Alto Prisma Cloud CWPP Security Events to Microsoft Sentinel** + +To enable the Palo Alto Prisma Cloud CWPP Security Events for Microsoft Sentinel, provide the required information below and click on Connect. 
+> +- **Path to console**: https://europe-west3.cloud.twistlock.com/{sasid} +- **Prisma Access Key (API)**: Prisma Access Key (API) +- **Secret**: (password field) +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `PrismaCloudCompute_CL` | @@ -38,6 +60,6 @@ This solution ingests data into **1 table(s)**: | Table | Used By Connectors | |-------|-------------------| -| `PrismaCloudCompute_CL` | [Palo Alto Prisma Cloud CWPP (using REST API)](../connectors/prismacloudcomputenativepoller.md), [Palo Alto Prisma Cloud CWPP (using REST API)](../connectors/paloaltoprismacloudcwpp.md) | +| `PrismaCloudCompute_CL` | [Palo Alto Prisma Cloud CWPP (using REST API)](../connectors/paloaltoprismacloudcwpp.md), [Palo Alto Prisma Cloud CWPP (using REST API)](../connectors/prismacloudcomputenativepoller.md) | [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/paloalto-pan-os.md b/Tools/Solutions Analyzer/connector-docs/solutions/paloalto-pan-os.md index eb8f13d3fa0..4e238bbfc5b 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/paloalto-pan-os.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/paloalto-pan-os.md @@ -26,6 +26,70 @@ This solution provides **2 data connector(s)**. The Palo Alto Networks firewall connector allows you to easily connect your Palo Alto Networks logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Palo Alto Networks logs to Syslog agent** + + Configure Palo Alto Networks to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. + +Go to [configure Palo Alto Networks NGFW for sending CEF events.](https://aka.ms/sentinel-paloaltonetworks-readme) + +Go to [Palo Alto CEF Configuration](https://aka.ms/asi-syslog-paloalto-forwarding) and Palo Alto [Configure Syslog Monitoring](https://aka.ms/asi-syslog-paloalto-configure) steps 2, 3, choose your version, and follow the instructions using the following guidelines: + +1. Set the Syslog server format to **BSD**. + +2. The copy/paste operations from the PDF might change the text and insert random characters. 
To avoid this, copy the text to an editor and remove any characters that might break the log format before pasting it. + +[Learn more >](https://aka.ms/CEFPaloAlto) + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/paloaltocdl.md b/Tools/Solutions Analyzer/connector-docs/solutions/paloaltocdl.md index 1fbcd8b5627..c98c8f91edd 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/paloaltocdl.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/paloaltocdl.md @@ -25,6 +25,61 @@ This solution provides **2 data connector(s)**. The [Palo Alto Networks CDL](https://www.paloaltonetworks.com/cortex/cortex-data-lake) data connector provides the capability to ingest [CDL logs](https://docs.paloaltonetworks.com/strata-logging-service/log-reference/log-forwarding-schema-overview) into Microsoft Sentinel. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoCDLEvent**](https://aka.ms/sentinel-paloaltocdl-parser) which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. 
Configure Cortex Data Lake to forward logs to a Syslog Server using CEF** + + [Follow the instructions](https://docs.paloaltonetworks.com/cortex/cortex-data-lake/cortex-data-lake-getting-started/get-started-with-log-forwarding-app/forward-logs-from-logging-service-to-syslog-server.html) to configure logs forwarding from Cortex Data Lake to a Syslog Server. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/paloaltoprismacloud.md b/Tools/Solutions Analyzer/connector-docs/solutions/paloaltoprismacloud.md index c0672f65245..7a6df4281e3 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/paloaltoprismacloud.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/paloaltoprismacloud.md @@ -25,6 +25,35 @@ This solution provides **2 data connector(s)**. 
The Palo Alto Prisma Cloud CSPM data connector allows you to connect to your Palo Alto Prisma Cloud CSPM instance and ingesting Alerts (https://pan.dev/prisma-cloud/api/cspm/alerts/) & Audit Logs(https://pan.dev/prisma-cloud/api/cspm/audit-logs/) into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Palo Alto Prisma Cloud CSPM Events to Microsoft Sentinel** + +To get more information on how to obtain the Prisma Cloud Access Key, Secret Key, and Base URL, please refer to the [connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/Readme.md), provide the required information below and click on Connect. +> +- **Prisma Cloud Access Key**: Enter Access Key +- **Prisma Cloud Secret Key**: (password field) +- **Prisma Cloud Base URL**: https://api2.eu.prismacloud.io +- Click 'Connect' to establish connection +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **PaloAltoPrismaCloudCSPM Api Endpoints** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). 
+ +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + | | | |--------------------------|---| | **Tables Ingested** | `PaloAltoPrismaCloudAlertV2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/pathlock-tdnr.md b/Tools/Solutions Analyzer/connector-docs/solutions/pathlock-tdnr.md index ca7608969ed..cbc45315ad7 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/pathlock-tdnr.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/pathlock-tdnr.md @@ -33,6 +33,44 @@ This precision-driven approach helps security teams drastically reduce false pos By combining business-context intelligence with advanced analytics, Pathlock enables enterprises to strengthen detection accuracy, streamline response actions, and maintain continuous control across their SAP environments—without adding complexity or redundant monitoring layers. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher. +- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Create ARM Resources and Provide the Required Permissions** + +We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it. +#### Automated deployment of Azure resources +Clicking on "Deploy push connector resources" will trigger the creation of DCR and DCE resources. +It will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials. +- Deploy push connector resources + Application: Pathlock Inc. Threat Detection and Response for SAP + +**2. Maintain the data collection endpoint details and authentication info in your central instance of Pathlock's Cybersecurity Application Controls: Threat Detection and Response** + +Share the data collection endpoint URL and authentication info with the Pathlock administrator to configure the plug and play forwarding in Threat Detection and Response to send data to the data collection endpoint. +Please do not hesitate to contact Pathlock if support is needed. 
+- **Use this value to configure as Tenant ID in the LogIngestionAPI credential.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra Application Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Use this value to configure the LogsIngestionURL parameter when deploying the IFlow.**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **DCR Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `ABAPAuditLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/perimeter-81.md b/Tools/Solutions Analyzer/connector-docs/solutions/perimeter-81.md index 64fb34884b6..8ff9c60b51d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/perimeter-81.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/perimeter-81.md @@ -21,6 +21,22 @@ This solution provides **1 data connector(s)**. The Perimeter 81 Activity Logs connector allows you to easily connect your Perimeter 81 activity logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Please note the values below and follow the instructions here to connect your Perimeter 81 activity logs with Microsoft Sentinel. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `Perimeter81_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/phosphorus.md b/Tools/Solutions Analyzer/connector-docs/solutions/phosphorus.md index 7b06bd3b441..873b1b9274b 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/phosphorus.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/phosphorus.md @@ -22,6 +22,39 @@ This solution provides **1 data connector(s)**. The Phosphorus Device Connector provides the capability to Phosphorus to ingest device data logs into Microsoft Sentinel through the Phosphorus REST API. The Connector provides visibility into the devices enrolled in Phosphorus. This Data Connector pulls devices information along with its corresponding alerts. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **REST API Credentials/permissions**: **Phosphorus API Key** is required. Please make sure that the API Key associated with the User has the Manage Settings permissions enabled. + + Follow these instructions to enable Manage Settings permissions. + 1. Log in to the Phosphorus Application + 2. Go to 'Settings' -> 'Groups' + 3. 
Select the Group the Integration user is a part of + 4. Navigate to 'Product Actions' -> toggle on the 'Manage Settings' permission. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**STEP 1 - Configuration steps for the Phosphorus API** + + Follow these instructions to create a Phosphorus API key. + 1. Log into your Phosphorus instance + 2. Navigate to Settings -> API + 3. If the API key has not already been created, press the **Add button** to create the API key + 4. The API key can now be copied and used during the Phosphorus Device connector configuration + +**2. Connect the Phosphorus Application with Microsoft Sentinel** + +**STEP 2 - Fill in the details below** + +>**IMPORTANT:** Before deploying the Phosphorus Device data connector, have the Phosphorus Instance Domain Name readily available as well as the Phosphorus API Key(s) +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + | | | |--------------------------|---| | **Tables Ingested** | `Phosphorus_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/pingfederate.md b/Tools/Solutions Analyzer/connector-docs/solutions/pingfederate.md index e73f9d37a6b..57405d98f66 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/pingfederate.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/pingfederate.md @@ -25,6 +25,61 @@ This solution provides **2 data connector(s)**. The [PingFederate](https://www.pingidentity.com/en/software/pingfederate.html) data connector provides the capability to ingest [PingFederate events](https://docs.pingidentity.com/bundle/pingfederate-102/page/lly1564002980532.html) into Microsoft Sentinel. 
Refer to [PingFederate documentation](https://docs.pingidentity.com/bundle/pingfederate-102/page/tle1564002955874.html) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PingFederateEvent**](https://aka.ms/sentinel-PingFederate-parser) which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. 
Forward Common Event Format (CEF) logs to Syslog agent** + + [Follow these steps](https://docs.pingidentity.com/bundle/pingfederate-102/page/gsn1564002980953.html) to configure PingFederate sending audit log via syslog in CEF format. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/pingone.md b/Tools/Solutions Analyzer/connector-docs/solutions/pingone.md index 8d36b73196a..807557a6d0f 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/pingone.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/pingone.md @@ -22,6 +22,47 @@ This solution provides **1 data connector(s)**. This connector ingests **audit activity logs** from the PingOne Identity platform into Microsoft Sentinel using a Codeless Connector Framework. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Ping One connector to Microsoft Sentinel** +Before connecting to PingOne, ensure the following prerequisites are completed. Refer to the [document](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingOne/README.md) for detailed setup instructions, including how to obtain client credentials and the environment ID. +#### 1. Client Credentials + You'll need client credentials, including your client id and client secret. +#### 2. Environment Id + To generate token and gather logs from audit activities endpoint +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Environment ID** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add domain** + +*Add domain* + +When you click the "Add domain" button in the portal, a configuration form will open. 
You'll need to provide: + +- **Client ID** (optional): Enter ID of the client +- **Client Secret** (optional): Enter your secret key +- **Environment ID** (optional): Enter your environment Id +- **Api domain** (optional): Enter your Api domain Eg.( pingone.com,pingone.eu etc )depending on the region credentials created for + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `PingOne_AuditActivitiesV2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/postgresql.md b/Tools/Solutions Analyzer/connector-docs/solutions/postgresql.md index 58e0cd604d6..792293cf533 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/postgresql.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/postgresql.md @@ -21,6 +21,74 @@ This solution provides **1 data connector(s)**. PostgreSQL data connector provides the capability to ingest [PostgreSQL](https://www.postgresql.org/) events into Microsoft Sentinel. Refer to [PostgreSQL documentation](https://www.postgresql.org/docs/current/index.html) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on PostgreSQL parser based on a Kusto Function to work as expected. This parser is installed along with solution installation. + +**1. 
Install and onboard the agent for Linux or Windows** + +Install the agent on the PostgreSQL Server where the logs are generated. + +> Logs from PostgreSQL Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure PostgreSQL to write logs to files** + +1. Edit postgresql.conf file to write logs to files: + +>**log_destination** = 'stderr' + +>**logging_collector** = on + +Set the following parameters: **log_directory** and **log_filename**. Refer to the [PostgreSQL documentation for more details](https://www.postgresql.org/docs/current/runtime-config-logging.html) + +**3. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. From the left pane, select **Settings**, select **Custom Logs** and click **+Add custom log** +3. Click **Browse** to upload a sample of a PostgreSQL log file. Then, click **Next >** +4. Select **Timestamp** as the record delimiter and click **Next >** +5. Select **Windows** or **Linux** and enter the path to PostgreSQL logs based on your configuration(e.g. 
for some Linux distros the default path is /var/log/postgresql/) +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **PostgreSQL** as the custom log Name (the '_CL' suffix will be added automatically) and click **Done**. + +**2. Validate connectivity** + +It may take upwards of 20 minutes until your logs start to appear in Microsoft Sentinel. + | | | |--------------------------|---| | **Tables Ingested** | `PostgreSQL_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/prancer-pensuiteai-integration.md b/Tools/Solutions Analyzer/connector-docs/solutions/prancer-pensuiteai-integration.md index d24d167e4c4..c4a495621c6 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/prancer-pensuiteai-integration.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/prancer-pensuiteai-integration.md @@ -21,6 +21,29 @@ This solution provides **1 data connector(s)**. The Prancer Data Connector has provides the capability to ingest Prancer (CSPM)[https://docs.prancer.io/web/CSPM/] and [PAC](https://docs.prancer.io/web/PAC/introduction/) data to process through Microsoft Sentinel. Refer to [Prancer Documentation](https://docs.prancer.io/web) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Include custom pre-requisites if the connectivity requires - else delete customs**: Description for any custom pre-requisite + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Prancer REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +STEP 1: Follow the documentation on the [Prancer Documentation Site](https://docs.prancer.io/web/) in order to set up a scan with an Azure cloud connector. + +STEP 2: Once the scan is created, go to the 'Third Party Integrations' menu for the scan and select Sentinel. + +STEP 3: Follow the configuration wizard to select where in Azure the results should be sent to. + +STEP 4: Data should start to get fed into Microsoft Sentinel for processing. + | | | |--------------------------|---| | **Tables Ingested** | `prancer_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/proofpoint-on-demand(pod)-email-security.md b/Tools/Solutions Analyzer/connector-docs/solutions/proofpoint-on-demand(pod)-email-security.md index e4e962364ec..1b76df76c80 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/proofpoint-on-demand(pod)-email-security.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/proofpoint-on-demand(pod)-email-security.md @@ -25,6 +25,95 @@ This solution provides **2 data connector(s)**. Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Websocket API Credentials/permissions**: **ProofpointClusterID**, **ProofpointToken** is required. [See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>This data connector depends on a parser based on a Kusto Function to work as expected. 
[Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD** + +**STEP 1 - Configuration steps for the Proofpoint Websocket API** + +1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer to the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. +2. You must provide your cluster id and security token. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. 
Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. 
Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + ProofpointClusterID + ProofpointToken + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +3. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `ProofpointPODMessage_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/proofpointtap.md b/Tools/Solutions Analyzer/connector-docs/solutions/proofpointtap.md index 345b4e3972b..ba1bc44e372 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/proofpointtap.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/proofpointtap.md @@ -25,6 +25,32 @@ This solution provides **2 data connector(s)**. The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. + +**Custom Permissions:** +- **Proofpoint TAP API Key**: A Proofpoint TAP API service principal and secret is required to access Proofpoint's SIEM API. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**Configuration steps for the Proofpoint TAP API** + +1. Log into the [Proofpoint TAP dashboard](https://threatinsight.proofpoint.com/) +2. Navigate to **Settings** and go to **Connected Applications** tab + 3. Click on **Create New Credential** + 4. Provide a name and click **Generate** + 5. Copy **Service Principal** and **Secret** values + +>**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**ProofpointTAPEvent**](https://aka.ms/sentinel-ProofpointTAPDataConnector-parser) which is deployed with the Microsoft Sentinel Solution. + +- **Service Principal**: 123456 +- **Secret**: (password field) +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `ProofPointTAPClicksBlockedV2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/pulse-connect-secure.md b/Tools/Solutions Analyzer/connector-docs/solutions/pulse-connect-secure.md index 18c30c22cf9..9077c52ea8f 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/pulse-connect-secure.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/pulse-connect-secure.md @@ -21,6 +21,49 @@ This solution provides **1 data connector(s)**. The [Pulse Connect Secure](https://www.pulsesecure.net/products/pulse-connect-secure/) connector allows you to easily connect your Pulse Connect Secure logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. 
Integrating Pulse Connect Secure with Microsoft Sentinel provides more insight into your organization's network and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **Pulse Connect Secure**: must be configured to export logs via Syslog + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Pulse Connect Secure and load the function code or click [here](https://aka.ms/sentinel-PulseConnectSecure-parser), on the second line of the query, enter the hostname(s) of your Pulse Connect Secure device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. 
Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the Pulse Connect Secure** + +[Follow the instructions](https://help.ivanti.com/ps/help/en_US/PPS/9.1R13/ag/configuring_an_external_syslog_server.htm) to enable syslog streaming of Pulse Connect Secure logs. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/qualys-vm-knowledgebase.md b/Tools/Solutions Analyzer/connector-docs/solutions/qualys-vm-knowledgebase.md index 3fc9df04603..1731bbc79ef 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/qualys-vm-knowledgebase.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/qualys-vm-knowledgebase.md @@ -25,6 +25,87 @@ The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerabi This data can used to correlate and enrich vulnerability detections found by the [Qualys Vulnerability Management (VM)](https://docs.microsoft.com/azure/sentinel/connect-qualys-vm) data connector. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **Qualys API Key**: A Qualys VM API username and password is required. [See the documentation to learn more about Qualys VM API](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias QualysVM Knowledgebase and load the function code or click [here](https://aka.ms/sentinel-qualyskb-parser), on the second line of the query, enter the hostname(s) of your QualysVM Knowledgebase device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +>This data connector depends on a parser based on a Kusto Function to work as expected. [Follow the steps](https://aka.ms/sentinel-qualyskb-parser) to use the Kusto function alias, **QualysKB** + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Qualys API** + +1. Log into the Qualys Vulnerability Management console with an administrator account, select the **Users** tab and the **Users** subtab. +2. Click on the **New** drop-down menu and select **Users**. +3. Create a username and password for the API account. +4. 
In the **User Roles** tab, ensure the account role is set to **Manager** and access is allowed to **GUI** and **API** +5. Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. +6. Log back into the console using an administrator account and modify the API account's User Roles, removing access to **GUI**. +7. Save all changes. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Qualys KB connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Qualys API username and password, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Qualys KB connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-qualyskb-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-qualyskb-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , update the **URI**, and any additional URI **Filter Parameters** (This value should include a "&" symbol between each parameter and should not include any spaces) +> - Enter the URI that corresponds to your region. 
The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348) +> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + - Note: If deployment failed due to the storage account name being taken, change the **Function Name** to a unique value and redeploy. + + **Option 2 - Manual Deployment of Azure Functions** + + This method provides the step-by-step instructions to deploy the Qualys KB connector manually with Azure Function. +**Step 1 - Deploy a Function App** + + 1. Download the [Azure Function App](https://aka.ms/sentinel-qualyskb-functioncode) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **+ New application setting**. +4. Add each of the following seven (7) application settings individually, with their respective string values (case-sensitive): + apiUsername + apiPassword + workspaceID + workspaceKey + uri + filterParameters + logAnalyticsUri (optional) +> - Enter the URI that corresponds to your region. 
The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348). The `uri` value must follow the following schema: `https:///api/2.0` +> - Add any additional filter parameters, for the `filterParameters` variable, that need to be appended to the URI. The `filterParameter` value should include a "&" symbol between each parameter and should not include any spaces. +> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. + - Use logAnalyticsUri to override the log analytics API endpoint for delegated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +5. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `QualysKB_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/qualysvm.md b/Tools/Solutions Analyzer/connector-docs/solutions/qualysvm.md index d782201d2ca..c943a62ab32 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/qualysvm.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/qualysvm.md @@ -30,6 +30,110 @@ The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerabi

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Qualys API Key**: A Qualys VM API username and password is required. [See the documentation to learn more about Qualys VM API](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to Qualys VM to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the Qualys VM API** + +1. Log into the Qualys Vulnerability Management console with an administrator account, select the **Users** tab and the **Users** subtab. +2. Click on the **New** drop-down menu and select **Users..** +3. 
Create a username and password for the API account. +4. In the **User Roles** tab, ensure the account role is set to **Manager** and access is allowed to **GUI** and **API** +5. Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. +6. Log back into the console using an administrator account and modify the API account's User Roles, removing access to **GUI**. +7. Save all changes. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Qualys VM connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Qualys VM API Authorization Key(s), readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +>**NOTE:** This connector has been updated, if you have previously deployed an earlier version, and want to update, please delete the existing Qualys VM Azure Function before redeploying this version. Please use Qualys V2 version Workbook, detections. + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Qualys VM connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. 
Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, update the **URI**, and any additional URI **Filter Parameters** (each filter should be separated by an "&" symbol, no spaces.) +> - Enter the URI that corresponds to your region. The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348) -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format. + - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. +> - Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Qualys VM connector manually with Azure Functions. + +**1. Create a Function App** + +1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**. +2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. +3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected. +4. Make other preferable configuration changes, if needed, then click **Create**. + +**2. Import Function App Code** + +1. 
In the newly created Function App, select **Functions** on the left pane and click **+ New Function**. +2. Select **Timer Trigger**. +3. Enter a unique Function **Name** and leave the default cron schedule of every 5 minutes, then click **Create**. +4. Click on **Code + Test** on the left pane. +5. Copy the [Function App Code](https://aka.ms/sentinel-QualysVM-functioncodeV2) and paste into the Function App `run.ps1` editor. +6. Click **Save**. + +**3. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following eight (8) application settings individually, with their respective string values (case-sensitive): + apiUsername + apiPassword + workspaceID + workspaceKey + uri + filterParameters + timeInterval + logAnalyticsUri (optional) +> - Enter the URI that corresponds to your region. The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348). The `uri` value must follow the following schema: `https:///api/2.0/fo/asset/host/vm/detection/?action=list&vm_processed_after=` -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format. +> - Add any additional filter parameters, for the `filterParameters` variable, that need to be appended to the URI. Each parameter should be separated by an "&" symbol and should not include any spaces. +> - Set the `timeInterval` (in minutes) to the value of `5` to correspond to the Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion. +> - Note: If using Azure Key Vault, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + +**4. Configure the host.json**. + +Due to the potentially large amount of Qualys host detection data being ingested, it can cause the execution time to surpass the default Function App timeout of five (5) minutes. Increase the default timeout duration to the maximum of ten (10) minutes, under the Consumption Plan, to allow more time for the Function App to execute. + +1. In the Function App, select the Function App Name and select the **App Service Editor** blade. +2. Click **Go** to open the editor, then select the **host.json** file under the **wwwroot** directory. +3. Add the line `"functionTimeout": "00:10:00",` above the `managedDependency` line +4. Ensure **SAVED** appears on the top right corner of the editor, then exit the editor. + +> NOTE: If a longer timeout duration is required, consider upgrading to an [App Service Plan](https://docs.microsoft.com/azure/azure-functions/functions-scale#timeout) + | | | |--------------------------|---| | **Tables Ingested** | `QualysHostDetectionV2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/quokka.md b/Tools/Solutions Analyzer/connector-docs/solutions/quokka.md index fe80f14009f..0f57032a284 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/quokka.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/quokka.md @@ -21,6 +21,26 @@ This solution provides **1 data connector(s)**. 
Ingest Qscout application events into Microsoft Sentinel +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required + +**Custom Permissions:** +- **Qscout organization id**: The API requires your organization ID in Qscout. +- **Qscout organization API key**: The API requires your organization API key in Qscout. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Codeless Connector Framework (CCF) to connect to the Qscout app events feed and ingest data into Microsoft Sentinel + +Provide the required values below: +- **Qscout Organization ID**: 123456 +- **Qscout Organization API Key**: abcdxyz +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `QscoutAppEvents_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/radiflow.md b/Tools/Solutions Analyzer/connector-docs/solutions/radiflow.md index 1fd49a24fa8..6d1759eca1f 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/radiflow.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/radiflow.md @@ -21,6 +21,67 @@ This solution provides **1 data connector(s)**. iSID enables non-disruptive monitoring of distributed ICS networks for changes in topology and behavior, using multiple security packages, each offering a unique capability pertaining to a specific type of network activity +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**RadiflowEvent**] which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade. + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule). + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy._ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine. + + **Step B. Configure iSID to send logs using CEF** + + Configure log forwarding using CEF: + +1. Navigate to the **System Notifications** section of the Configuration menu. + +2. Under Syslog, select **+Add**. + +3. In the **New Syslog Server** dialog specify the name, remote server **IP**, **Port**, **Transport** and select **Format** - **CEF**. + +4. Press **Apply** to exit the **Add Syslog dialog**. + + **Step C. 
Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/rapid7insightvm.md b/Tools/Solutions Analyzer/connector-docs/solutions/rapid7insightvm.md index 4eba9e1c277..5eb1f128249 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/rapid7insightvm.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/rapid7insightvm.md @@ -21,6 +21,74 @@ This solution provides **1 data connector(s)**. The [Rapid7 Insight VM](https://www.rapid7.com/products/insightvm/) Report data connector provides the capability to ingest Scan reports and vulnerability data into Microsoft Sentinel through the REST API from the Rapid7 Insight platform (Managed in the cloud). Refer to [API documentation](https://docs.rapid7.com/insight/api-overview/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials**: **InsightVMAPIKey** is required for REST API. [See the documentation to learn more about API](https://docs.rapid7.com/insight/api-overview/). Check all [requirements and follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) for obtaining credentials + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Insight VM API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. 
+ +>**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected [**InsightVMAssets**](https://aka.ms/sentinel-InsightVMAssets-parser) and [**InsightVMVulnerabilities**](https://aka.ms/sentinel-InsightVMVulnerabilities-parser) which are deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuration steps for the Insight VM Cloud** + + [Follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) to obtain the credentials. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Rapid7 Insight Vulnerability Management Report data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-InsightVMCloudAPI-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **InsightVMAPIKey**, choose **InsightVMCloudRegion** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. 
+ + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Rapid7 Insight Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + 1. Download the [Azure Function App](https://github.com/averbn/azure_sentinel_data_connectors/raw/main/insight-vm-cloud-azure-sentinel-data-connector/InsightVMCloudAPISentinelConn.zip) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +3. In the **Application settings** tab, select **New application setting**. +4. Add each of the following application settings individually, with their respective string values (case-sensitive): + InsightVMAPIKey + InsightVMCloudRegion + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +5. Once all application settings have been entered, click **Save**. 
+ | | | |--------------------------|---| | **Tables Ingested** | `NexposeInsightVMCloud_assets_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/red-canary.md b/Tools/Solutions Analyzer/connector-docs/solutions/red-canary.md index 5b285e40d2f..33d80d66f56 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/red-canary.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/red-canary.md @@ -22,6 +22,22 @@ This solution provides **1 data connector(s)**. The Red Canary data connector provides the capability to ingest published Detections into Microsoft Sentinel using the Data Collector REST API. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Create an Automate Playbook and Trigger as detailed in [this article](https://help.redcanary.com/hc/en-us/articles/4410957523479-Azure-Sentinel). You can skip the **Add analysis rule to Microsoft Sentinel** section; this data connector allows you to import the analysis rule directly into your workspace. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `RedCanaryDetections_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ridgesecurity.md b/Tools/Solutions Analyzer/connector-docs/solutions/ridgesecurity.md index cc151002586..b2c5b88e784 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ridgesecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ridgesecurity.md @@ -22,6 +22,63 @@ This solution provides **1 data connector(s)**. The RidgeBot connector lets users connect RidgeBot with Microsoft Sentinel, allowing creation of Dashboards, Workbooks, Notebooks and Alerts. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1. Kindly follow the steps to configure the data connector** + +**Step A. 
Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade. + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Configure the RidgeBot to forward events to syslog server as described here: https://portal.ridgesecurity.ai/downloadurl/89x72912. Generate some attack events for your application. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/rsa-securid.md b/Tools/Solutions Analyzer/connector-docs/solutions/rsa-securid.md index 8b63683c269..55e6f0edc0c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/rsa-securid.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/rsa-securid.md @@ -21,6 +21,53 @@ This solution provides **1 data connector(s)**. The [RSA® SecurID Authentication Manager](https://www.securid.com/) data connector provides the capability to ingest [RSA® SecurID Authentication Manager events](https://community.rsa.com/t5/rsa-authentication-manager/rsa-authentication-manager-log-messages/ta-p/630160) into Microsoft Sentinel. Refer to [RSA® SecurID Authentication Manager documentation](https://community.rsa.com/t5/rsa-authentication-manager/getting-started-with-rsa-authentication-manager/ta-p/569582) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**RSASecurIDAMEvent**](https://aka.ms/sentinel-rsasecuridam-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using RSA SecurID Authentication Manager version: 8.4 and 8.5 + +**1. 
Install and onboard the agent for Linux or Windows** + +Install the agent on the Server where the RSA® SecurID Authentication Manager logs are forwarded. + +> Logs from RSA® SecurID Authentication Manager Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure RSA® SecurID Authentication Manager event forwarding** + +Follow the configuration steps below to get RSA® SecurID Authentication Manager logs into Microsoft Sentinel. +1. [Follow these instructions](https://community.rsa.com/t5/rsa-authentication-manager/configure-the-remote-syslog-host-for-real-time-log-monitoring/ta-p/571374) to forward alerts from the Manager to a syslog server. 
+ | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/rsaidplus-adminlogs-connector.md b/Tools/Solutions Analyzer/connector-docs/solutions/rsaidplus-adminlogs-connector.md index 7523dee01b6..9c29e6aea2b 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/rsaidplus-adminlogs-connector.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/rsaidplus-adminlogs-connector.md @@ -21,6 +21,39 @@ This solution provides **1 data connector(s)**. The RSA ID Plus AdminLogs Connector provides the capability to ingest [Cloud Admin Console Audit Events](https://community.rsa.com/s/article/Cloud-Administration-Event-Log-API-5d22ba17) into Microsoft Sentinel using Cloud Admin APIs. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **RSA ID Plus API Authentication**: To access the Admin APIs, a valid Base64URL encoded JWT token, signed with the client's Legacy Administration API key is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Codeless Connector Framework (CCF) to connect to the RSA ID Plus Cloud Admin APIs to pull logs into Microsoft Sentinel. + +**1. **STEP 1** - Create Legacy Admin API Client in Cloud Admin Console.** + +Follow steps mentioned in this [page](https://community.rsa.com/s/article/Manage-Legacy-Clients-API-Keys-a89c9cbc#). + +**2. **STEP 2** - Generate the Base64URL encoded JWT Token.** + +Follow the steps mentioned in this [page](https://community.rsa.com/s/article/Authentication-for-the-Cloud-Administration-APIs-a04e3fb9) under the header 'Legacy Administration API'. + +**3. 
**STEP 3** - Configure the Cloud Admin API to start ingesting Admin event logs into Microsoft Sentinel.** + +Provide the required values below: +- **Admin API URL**: https://.access.securid.com/AdminInterface/restapi/v1/adminlog/exportLogs +- **JWT Token**: (password field) + +**4. **STEP 4** - Click Connect** + +Verify all the fields above were filled in correctly. Press Connect to start the connector. +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `RSAIDPlus_AdminLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/rubriksecuritycloud.md b/Tools/Solutions Analyzer/connector-docs/solutions/rubriksecuritycloud.md index 87e993e5918..dfb2ed9391a 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/rubriksecuritycloud.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/rubriksecuritycloud.md @@ -22,6 +22,123 @@ This solution provides **1 data connector(s)**. The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Rubrik webhook which push its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Rubrik Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Rubrik connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-RubrikWebhookEvents-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. 
+3. Enter the below information : + Function Name + Workspace ID + Workspace Key + AnomaliesTableName + RansomwareAnalysisTableName + ThreatHuntsTableName + EventsTableName + LogLevel + +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**3. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Rubrik Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-RubrikWebhookEvents-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. RubrikXXXXX). + + e. **Select a runtime:** Choose Python 3.8 or above. + + f. Select a location for new resources. 
For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective values (case-sensitive): + WorkspaceID + WorkspaceKey + AnomaliesTableName + RansomwareAnalysisTableName + ThreatHuntsTableName + EventsTableName + LogLevel + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. +4. Once all application settings have been entered, click **Save**. + +**Post Deployment steps** + +**7. 1) Get the Function app endpoint** + +1. Go to Azure function Overview page and Click on **"Functions"** tab. +2. Click on the function called **"RubrikHttpStarter"**. +3. Go to **"GetFunctionurl"** and copy the function url. + +**8. 2) Add a webhook in RubrikSecurityCloud to send data to Microsoft Sentinel.** + +Follow the Rubrik User Guide instructions to [Add a Webhook](https://docs.rubrik.com/en-us/saas/saas/common/adding_webhook.html) to begin receiving event information + 1. Select the Microsoft Sentinel as the webhook Provider + 2. Enter the desired Webhook name + 3. Enter the URL part from copied Function-url as the webhook URL endpoint and replace **{functionname}** with **"RubrikAnomalyOrchestrator"**, for the Rubrik Microsoft Sentinel Solution + 4. Select the EventType as Anomaly + 5. 
Select the following severity levels: Critical, Warning, Informational + 6. Choose multiple log types, if desired, when running **"RubrikEventsOrchestrator"** + 7. Repeat the same steps to add webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events. + + + NOTE: while adding webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events, replace **{functionname}** with **"RubrikRansomwareOrchestrator"**, **"RubrikThreatHuntOrchestrator"** and **"RubrikEventsOrchestrator"** respectively in copied function-url. + +*Now we are done with the Rubrik webhook configuration. Once the webhook events are triggered, you should be able to see the Anomaly, Anomaly Detection Analysis, Threat Hunt events and Other Events from Rubrik in the respective Log Analytics workspace tables called "Rubrik_Anomaly_Data_CL", "Rubrik_Ransomware_Data_CL", "Rubrik_ThreatHunt_Data_CL", and "Rubrik_Events_Data_CL".* + | | | |--------------------------|---| | **Tables Ingested** | `Rubrik_Anomaly_Data_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/sailpointidentitynow.md b/Tools/Solutions Analyzer/connector-docs/solutions/sailpointidentitynow.md index c5ef36e11ef..dd80db41f92 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/sailpointidentitynow.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/sailpointidentitynow.md @@ -20,6 +20,99 @@ This solution provides **1 data connector(s)**. The [SailPoint](https://www.sailpoint.com/) IdentityNow data connector provides the capability to ingest [SailPoint IdentityNow] search events into Microsoft Sentinel through the REST API. The connector provides customers the ability to extract audit information from their IdentityNow tenant. It is intended to make it even easier to bring IdentityNow user activity and governance events into Microsoft Sentinel to improve insights from your security incident and event monitoring solution.
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **SailPoint IdentityNow API Authentication Credentials**: TENANT_ID, CLIENT_ID and CLIENT_SECRET are required for authentication. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the SailPoint IdentityNow REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the SailPoint IdentityNow API** + + [Follow the instructions](https://community.sailpoint.com/t5/IdentityNow-Articles/Best-Practice-Using-Personal-Access-Tokens-in-IdentityNow/ta-p/150471) to obtain the credentials. 
+ +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the SailPoint IdentityNow data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the SailPoint IdentityNow data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-sailpointidentitynow-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter other information and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the SailPoint IdentityNow data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-sailpointidentitynow-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. 
Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. searcheventXXXXX). + + e. **Select a runtime:** Choose Python 3.9. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + TENANT_ID + SHARED_KEY + LIMIT + GRANT_TYPE + CUSTOMER_ID + CLIENT_ID + CLIENT_SECRET + AZURE_STORAGE_ACCESS_KEY + AZURE_STORAGE_ACCOUNT_NAME + AzureWebJobsStorage + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud.
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `SailPointIDN_Events_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/salesforce-service-cloud.md b/Tools/Solutions Analyzer/connector-docs/solutions/salesforce-service-cloud.md index 95362bca1d2..4c4e4447edc 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/salesforce-service-cloud.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/salesforce-service-cloud.md @@ -25,6 +25,31 @@ This solution provides **2 data connector(s)**. The Salesforce Service Cloud data connector provides the capability to ingest information about your Salesforce operational events into Microsoft Sentinel through the REST API. The connector provides the ability to review events in your org on an accelerated basis, get [event log files](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/event_log_file_hourly_overview.htm) in hourly increments for recent activity. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Salesforce Service Cloud API access**: Access to the Salesforce Service Cloud API through a Connected App is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1.
Connect to Salesforce Service Cloud API to start collecting event logs in Microsoft Sentinel** + +Follow [Create a Connected App in Salesforce for OAuth](https://help.salesforce.com/s/articleView?id=platform.ev_relay_create_connected_app.htm&type=5) and [Configure a Connected App for the OAuth 2.0 Client Credentials Flow](https://help.salesforce.com/s/articleView?id=xcloud.connected_app_client_credentials_setup.htm&type=5) to create a Connected App with access to the Salesforce Service Cloud API. Through those instructions, you should get the Consumer Key and Consumer Secret. + For Salesforce Domain name, Go to Setup, type My Domain in the Quick Find box, and select My Domain to view your domain details. Make sure to enter the domain name without a trailing slash (e.g., https://your-domain.my.salesforce.com). Fill the form below with that information. +- **Salesforce Domain Name**: Salesforce Domain Name +- **Log Collection Interval** (select) + - Hourly + - Daily +- **OAuth Configuration**: + - Consumer Key + - Consumer Secret + - Click 'Connect' to authenticate + | | | |--------------------------|---| | **Tables Ingested** | `SalesforceServiceCloudV2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/samsung-knox-asset-intelligence.md b/Tools/Solutions Analyzer/connector-docs/solutions/samsung-knox-asset-intelligence.md index c798e8af6ae..5ae1e17be5f 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/samsung-knox-asset-intelligence.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/samsung-knox-asset-intelligence.md @@ -21,6 +21,64 @@ This solution provides **1 data connector(s)**. Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Entra app**: An Entra app needs to be registered and provisioned with ‘Microsoft Metrics Publisher’ role and configured with either Certificate or Client Secret as credentials for secure data transfer. See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution. + +**2. STEP 1 - Create and register an Entra Application** + +>**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated. + +>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. 
If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app. + +**3. STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template** + +>**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance. +- **Workspace Name**: `WorkspaceName` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +1. Click the button below to install Samsung Knox Intelligence Solution. + + [![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group. + +**5. STEP 3 - Obtain Microsoft Sentinel Data Collection details** + +Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). + +>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). + +Ensure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly. + +**6. STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts** + +1. 
Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal. +> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions. + +2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs. + +3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields. + + >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. + + >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. + +4. Click on **'Test Connection'** and ensure the connection is successful. + +5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).** + +6. To complete the Microsoft Sentinel integration, click **'Save'**. + | | | |--------------------------|---| | **Tables Ingested** | `Samsung_Knox_Application_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/sap-btp.md b/Tools/Solutions Analyzer/connector-docs/solutions/sap-btp.md index 61df2a4069f..4a60375cd07 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/sap-btp.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/sap-btp.md @@ -21,6 +21,59 @@ This solution provides **1 data connector(s)**. SAP Business Technology Platform (SAP BTP) brings together data management, analytics, artificial intelligence, application development, automation, and integration in one, unified environment. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Client Id and Client Secret for Audit Retrieval API**: Enable API access in BTP. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**Step 1 - Configuration steps for the SAP BTP Audit Retrieval API** + +Follow the steps provided by SAP [see Audit Log Retrieval API for Global Accounts in the Cloud Foundry Environment](https://help.sap.com/docs/btp/sap-business-technology-platform/audit-log-retrieval-api-for-global-accounts-in-cloud-foundry-environment/). Take a note of the **url** (Audit Retrieval API URL), **uaa.url** (User Account and Authentication Server url) and the associated **uaa.clientid**. + +>**NOTE:** You can onboard one or more BTP subaccounts by following the steps provided by SAP [see Audit Log Retrieval API Usage for Subaccounts in the Cloud Foundry Environment](https://help.sap.com/docs/btp/sap-business-technology-platform/audit-log-retrieval-api-usage-for-subaccounts-in-cloud-foundry-environment/). Add a connection for each subaccount. + +**2. Connect events from SAP BTP to Microsoft Sentinel** + +Connect using OAuth client credentials +**BTP connection** + +When you click the "Add account" button in the portal, a configuration form will open. You'll need to provide: + +*Account Details* + +- **Subaccount name (e.g. Contoso). This will be projected to the InstanceName column.** (optional): no space or special character allowed! 
+- **SAP BTP Client ID** (optional): Client ID +- **SAP BTP Client Secret** (optional): Client Secret +- **Authorization server URL (UAA server)** (optional): https://your-tenant.authentication.region.hana.ondemand.com +- **Audit Retrieval API URL** (optional): https://auditlog-management.cfapps.region.hana.ondemand.com + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + + +**3. Subaccounts** + +Each row represents a connected subaccount +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Subaccount Name** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + | | | |--------------------------|---| | **Tables Ingested** | `SAPBTPAuditLog_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/sap-etd-cloud.md b/Tools/Solutions Analyzer/connector-docs/solutions/sap-etd-cloud.md index 2b404cbccbd..5c60126c58c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/sap-etd-cloud.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/sap-etd-cloud.md @@ -22,6 +22,60 @@ This solution provides **1 data connector(s)**. The SAP Enterprise Threat Detection, cloud edition (ETD) data connector enables ingestion of security alerts from ETD into Microsoft Sentinel, supporting cross-correlation, alerting, and threat hunting. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Client Id and Client Secret for ETD Retrieval API**: Enable API access in ETD. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**Step 1 - Configuration steps for the SAP ETD Audit Retrieval API** + +Follow the steps provided by SAP [see ETD docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-for-global-accounts-in-cloud-foundry-environment/). Take a note of the **url** (Audit Retrieval API URL), **uaa.url** (User Account and Authentication Server url) and the associated **uaa.clientid**. + +>**NOTE:** You can onboard one or more ETD subaccounts by following the steps provided by SAP [see ETD docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-usage-for-subaccounts-in-cloud-foundry-environment/). Add a connection for each subaccount. + +>**TIP:** Use the [shared blog series](https://community.sap.com/t5/enterprise-resource-planning-blog-posts-by-sap/sap-enterprise-threat-detection-cloud-edition-joins-forces-with-microsoft/ba-p/13942075) for additional info. + +**2. Connect events from SAP ETD to Microsoft Sentinel** + +Connect using OAuth client credentials +**ETD connection** + +When you click the "Add account" button in the portal, a configuration form will open. 
You'll need to provide: + +*Account Details* + +- **SAP ETD Client ID** (optional): Client ID +- **SAP ETD Client Secret** (optional): Client Secret +- **Authorization server URL (UAA server)** (optional): https://your-tenant.authentication.region.hana.ondemand.com/oauth/token +- **SAP ETD data retrieval API URL** (optional): https://your-etd-cloud-data-retrieval-service.cfapps.region.hana.ondemand.com + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + + +**3. ETD accounts** + +Each row represents a connected ETD account +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Data retrieval endpoint** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. 
+ | | | |--------------------------|---| | **Tables Ingested** | `SAPETDAlerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/sap-logserv.md b/Tools/Solutions Analyzer/connector-docs/solutions/sap-logserv.md index b44aa087658..b6445b3a84f 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/sap-logserv.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/sap-logserv.md @@ -28,6 +28,45 @@ Near Realtime Log Collection: With ability to integrate into Microsoft Sentinel LogServ complements the existing SAP application layer threat monitoring and detections in Microsoft Sentinel with the log types owned by SAP ECS as the system provider. This includes logs like: SAP Security Audit Log (AS ABAP), HANA database, AS JAVA, ICM, SAP Web Dispatcher, SAP Cloud Connector, OS, SAP Gateway, 3rd party Database, Network, DNS, Proxy, Firewall +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. +- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher. +- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Create ARM Resources and Provide the Required Permissions** + +We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it. +#### Automated deployment of Azure resources +Clicking on "Deploy push connector resources" will trigger the creation of DCR and DCE resources. +It will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using OAuth v2 client credentials. +- Deploy push connector resources + Application: SAP LogServ push to Microsoft Sentinel + +**2. Maintain the data collection endpoint details and authentication info in SAP LogServ** + +Share the data collection endpoint URL and authentication info with the SAP LogServ administrator to configure the SAP LogServ to send data to the data collection endpoint. + +Learn more from [this blog series](https://community.sap.com/t5/enterprise-resource-planning-blog-posts-by-members/ultimate-blog-series-sap-logserv-integration-with-microsoft-sentinel/ba-p/14126401). 
+- **Use this value to configure as Tenant ID in the LogIngestionAPI credential.**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra Application Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Use this value to configure the LogsIngestionURL parameter when deploying the IFlow.**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **DCR Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `SAPLogServ_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/sap-s4-cloud-public-edition.md b/Tools/Solutions Analyzer/connector-docs/solutions/sap-s4-cloud-public-edition.md index 8d150bbd741..96d6c7f3901 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/sap-s4-cloud-public-edition.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/sap-s4-cloud-public-edition.md @@ -21,6 +21,63 @@ This solution provides **1 data connector(s)**. The SAP S/4HANA Cloud Public Edition (GROW with SAP) data connector enables ingestion of SAP's security audit log into the Microsoft Sentinel Solution for SAP, supporting cross-correlation, alerting, and threat hunting. Looking for alternative authentication mechanisms? See [here](https://github.com/Azure-Samples/Sentinel-For-SAP-Community/tree/main/integration-artifacts). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. 
+- **Keys** (Workspace): Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Client Id and Client Secret for Audit Retrieval API**: Enable API access in BTP. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**Step 1 - Configuration steps for SAP S/4HANA Cloud Public Edition** + +To connect to SAP S/4HANA Cloud Public Edition, you will need: + +1. Configure a communication arrangement for communication scenario **[SAP_COM_0750](https://help.sap.com/docs/SAP_S4HANA_CLOUD/0f69f8fb28ac4bf48d2b57b9637e81fa/a93dca70e2ce43d19ac93e3e5531e37d.html)** + +2. SAP S/4HANA Cloud Public Edition tenant **API URL** +3. Valid **communication user (username and password)** for your SAP S/4HANA Cloud system +4. **Appropriate authorizations** to access audit log data via OData services + +>**NOTE:** This connector supports Basic authentication. Looking for alternative authentication mechanisms? See [here](https://github.com/Azure-Samples/Sentinel-For-SAP-Community/tree/main/integration-artifacts) + +**2. Connect events from SAP S/4HANA Cloud Public Edition to Microsoft Sentinel Solution for SAP** + +Connect using Basic authentication +**S/4HANA Cloud Public Edition connection** + +When you click the "Add account" button in the portal, a configuration form will open. 
You'll need to provide: + +*Account Details* + +- **Username** (optional): Enter your SAP S/4HANA Cloud username +- **Password** (optional): Enter your SAP S/4HANA Cloud password +- **SAP S/4HANA Cloud API URL** (optional): https://my123456-api.s4hana.cloud.sap + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + + +**3. S/4HANA Cloud Public Edition connections** + +Each row represents a connected S/4HANA Cloud Public Edition system +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **S/4HANA Cloud API endpoint** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + | | | |--------------------------|---| | **Tables Ingested** | `ABAPAuditLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/securitybridge-app.md b/Tools/Solutions Analyzer/connector-docs/solutions/securitybridge-app.md index 54d72b5f98b..f76591d257d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/securitybridge-app.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/securitybridge-app.md @@ -25,6 +25,77 @@ This solution provides **2 data connector(s)**. SecurityBridge is the first and only holistic, natively integrated security platform, addressing all aspects needed to protect organizations running SAP from internal and external threats against their core business applications. 
The SecurityBridge platform is an SAP-certified add-on, used by organizations around the globe, and addresses the clients’ need for advanced cybersecurity, real-time monitoring, compliance, code security, and patching to protect against internal and external threats.This Microsoft Sentinel Solution allows you to integrate SecurityBridge Threat Detection events from all your on-premise and cloud based SAP instances into your security monitoring.Use this Microsoft Sentinel Solution to receive normalized and speaking security events, pre-built dashboards and out-of-the-box templates for your SAP security monitoring. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SecurityBridgeLogs and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App/Parsers/SecurityBridgeLogs.txt). The function usually takes 10-15 minutes to activate after solution installation/update. + +>**NOTE:** This data connector has been developed using SecurityBridge Application Platform 7.4.0. + +**1. 
Install and onboard the agent for Linux or Windows** + +This solution requires logs collection via an Microsoft Sentinel agent installation + +> The Microsoft Sentinel agent is supported on the following Operating Systems: +1. Windows Servers +2. SUSE Linux Enterprise Server +3. Redhat Linux Enterprise Server +4. Oracle Linux Enterprise Server +5. If you have the SAP solution installed on HPUX / AIX then you will need to deploy a log collector on one of the Linux options listed above and forward your logs to that collector +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. Click **+Add custom** +3. Click **Browse** to upload a sample of a SecurityBridge SAP log file (e.g. AED_20211129164544.cef). Then, click **Next >** +4. Select **New Line** as the record delimiter then click **Next >** +5. Select **Windows** or **Linux** and enter the path to SecurityBridge logs based on your configuration. Example: + - '/usr/sap/tmp/sb_events/*.cef' + +>**NOTE:** You can add as many paths as you want in the configuration. + +6. 
After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **SecurityBridgeLogs** as the custom log Name and click **Done** + +**3. Check logs in Microsoft Sentinel** + +Open Log Analytics to check if the logs are received using the SecurityBridgeLogs_CL Custom log table. + +>**NOTE:** It may take up to 30 minutes before new logs will appear in SecurityBridgeLogs_CL table. + | | | |--------------------------|---| | **Tables Ingested** | `SecurityBridgeLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/securityscorecard-cybersecurity-ratings.md b/Tools/Solutions Analyzer/connector-docs/solutions/securityscorecard-cybersecurity-ratings.md index ebe2154d4cc..4d645dade25 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/securityscorecard-cybersecurity-ratings.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/securityscorecard-cybersecurity-ratings.md @@ -30,6 +30,119 @@ This solution provides **3 data connector(s)**. SecurityScorecard is the leader in cybersecurity risk ratings. The [SecurityScorecard](https://www.SecurityScorecard.com/) data connector provides the ability for Sentinel to import SecurityScorecard ratings as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. Maintain full awareness of any company's security posture and be able to receive timely updates when scores change or drop. SecurityScorecard ratings are updated daily based on evidence collected across the web. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **SecurityScorecard API Key** is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the SecurityScorecard API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Configuration steps for the SecurityScorecard API** + + Follow these instructions to create/get a SecurityScorecard API token. + 1. As an administrator in SecurityScorecard, navigate to My Settings and then Users + 2. Click '+ Add User' + 3. In the form, check off 'Check to create a bot user' + 4. Provide a name for the Bot and provide it with Read Only permission + 5. Click 'Add User' + 6. Locate the newly created Bot user + 7. Click 'create token' in the Bot user's row + 8. 
Click 'Confirm' and note the API token that has been generated + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the SecurityScorecard Ratings data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the SecurityScorecard API Authorization Key(s) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the SecurityScorecard Ratings connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SecurityScorecardRatingsAPI-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information: + Function Name + Workspace ID + Workspace Key + SecurityScorecard API Key + SecurityScorecard Base URL (https://api.securityscorecard.io) + Domain + Portfolio IDs (Comma-separated IDs) + SecurityScorecard Ratings Table Name (Default: SecurityScorecardRatings) + Level Ratings Change (Default: 7) + Ratings Schedule (Default: 0 45 * * * *) + Diff Override Own Ratings (Default: true) + Diff Override Portfolio Ratings (Default: true) +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the SecurityScorecard Ratings data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. 
Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-SecurityScorecardRatingsAPI-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SecurityScorecardXXXXX). + + e. **Select a runtime:** Choose Python 3.8 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. 
Add each of the following application settings individually, with their respective values (case-sensitive): + Workspace ID + Workspace Key + SecurityScorecard API Key + SecurityScorecard Base URL (https://api.securityscorecard.io) + Domain + Portfolio IDs (Comma-separated IDs) + SecurityScorecard Ratings Table Name (Default: SecurityScorecardRatings) + Level Ratings Change (Default: 7) + Ratings Schedule (Default: 0 45 * * * *) + Diff Override Own Ratings (Default: true) + Diff Override Portfolio Ratings (Default: true) + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `SecurityScorecardRatings_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/semperis-directory-services-protector.md b/Tools/Solutions Analyzer/connector-docs/solutions/semperis-directory-services-protector.md index 97218d84893..30738d41e06 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/semperis-directory-services-protector.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/semperis-directory-services-protector.md @@ -23,6 +23,74 @@ Semperis Directory Services Protector data connector allows for the export of it It provides a data parser to manipulate the Windows event logs more easily. The different workbooks ease your Active Directory security monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**dsp_parser**](https://aka.ms/sentinel-SemperisDSP-parser) which is deployed with the Microsoft Sentinel Solution. + +**2. **Configure Windows Security Events via AMA connector**** + +Collect Windows security events logs from your **Semperis DSP Management Server** . + +**1. Install the Azure Monitor Agent (AMA)** + +On your **Semperis DSP Management Server** install the AMA on the DSP machine that will act as the event log forwarder. +You can skip this step if you have already installed the Microsoft agent for Windows + +**2. Create a Data Collection Rule (DCR)** + +Start collecting logs from the **Semperis DSP Management Server** . + +1. In the Azure portal, navigate to your **Log Analytics workspace**. +2. In the left pane, click on **Configuration** and then **Data connectors**. +3. Find and install the **the Windows Security Events via AMA** connector. +4. Click on **Open connector** and then on **Create data collection rule**. +5. Configure the DCR with the necessary details, such as the log sources and the destination workspace. +**Choose where to install the agent:** + +**Install agent on Semperis DSP Management Server** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**3. 
Configure Common Event Format via AMA connector** + +Collect syslog messages sent from your **Semperis DSP Management Server**. + +**1. Install the Azure Monitor Agent (AMA)** + +Install the AMA on the Linux machine that will act as the log forwarder. This machine will collect and forward CEF logs to Microsoft Sentinel. +You can skip this step if you have already installed the Microsoft agent for Linux + +**2. Create a Data Collection Rule (DCR)** + +Start collecting logs from the **Semperis DSP Management Server**. + +1. In the Azure portal, navigate to your **Log Analytics workspace**. +2. In the left pane, click on **Configuration** and then **Data connectors**. +3. Find and install the **Common Event Format via AMA** connector. +4. Click on **Open connector** and then on **Create data collection rule**. +5. Configure the DCR with the necessary details, such as the log sources and the destination workspace. +**Choose where to install the agent:** + +**Install agent on Semperis DSP Management Server** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**3. Configure sending CEF logs on your Semperis DSP Management Server** + +Configure your **Semperis DSP Management Server** to send CEF logs to the Linux machine where the AMA is installed. This involves setting the destination IP address and port for the CEF logs. + +> You should now be able to receive logs in the *Windows event log* table and *common log* table, log data can be parsed using the **dsp_parser()** function, used by all query samples, workbooks and analytic templates. 
+ | | | |--------------------------|---| | **Tables Ingested** | `SecurityEvent` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/senservapro.md b/Tools/Solutions Analyzer/connector-docs/solutions/senservapro.md index 08aeb5406a3..dc94159fa2d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/senservapro.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/senservapro.md @@ -21,6 +21,24 @@ This solution provides **1 data connector(s)**. The SenservaPro data connector provides a viewing experience for your SenservaPro scanning logs. View dashboards of your data, use queries to hunt & explore, and create custom alerts. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Setup the data connection** + +Visit [Senserva Setup](https://www.senserva.com/senserva-microsoft-sentinel-edition-setup/) for information on setting up the Senserva data connection, support, or any other questions. The Senserva installation will configure a Log Analytics Workspace for output. 
Deploy Microsoft Sentinel onto the configured Log Analytics Workspace to finish the data connection setup by following [this onboarding guide.](https://docs.microsoft.com/azure/sentinel/quickstart-onboard) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `SenservaPro_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/sentinelone.md b/Tools/Solutions Analyzer/connector-docs/solutions/sentinelone.md index a7b02f985b9..8ad6cc843a0 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/sentinelone.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/sentinelone.md @@ -25,6 +25,31 @@ This solution provides **2 data connector(s)**. The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +#### Configuration steps for the SentinelOne API + Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key. +#### 1. Retrieve SentinelOne Management URL + 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials + 1.2. In the [**Management Console**] copy the URL link above without the URL path. +#### 2. Retrieve API Token + 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials + 2.2. In the [**Management Console**], click [**Settings**] + 2.3. In [**Settings**] view click on [**USERS**]. + 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**]. + 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**]. + 2.6. Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**] +- **SentinelOne Management URL**: https://example.sentinelone.net/ +- **API Token**: API Token +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `SentinelOneActivities_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/seraphicsecurity.md b/Tools/Solutions Analyzer/connector-docs/solutions/seraphicsecurity.md index 44689d8f0f4..7e9578b17d8 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/seraphicsecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/seraphicsecurity.md @@ -22,6 +22,23 @@ This solution provides **1 data connector(s)**. The Seraphic Web Security data connector provides the capability to ingest [Seraphic Web Security](https://seraphicsecurity.com/) events and alerts into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. 
+ +**Custom Permissions:** +- **Seraphic API key**: API key for Microsoft Sentinel connected to your Seraphic Web Security tenant. To get this API key for your tenant - [read this documentation](https://constellation.seraphicsecurity.com/integrations/microsoft_sentinel/Guidance/MicrosoftSentinel-IntegrationGuide-230822.pdf). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Seraphic Web Security** + +Please insert the integration name, the Seraphic integration URL and your workspace name for Microsoft Sentinel: +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + | | | |--------------------------|---| | **Tables Ingested** | `SeraphicWebSecurity_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/sevcosecurity.md b/Tools/Solutions Analyzer/connector-docs/solutions/sevcosecurity.md index 861b09fe6d6..6edf0b9066d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/sevcosecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/sevcosecurity.md @@ -25,6 +25,26 @@ The Sevco Platform - Devices connector allows you to easily connect your Sevco D [For more information >​](https://docs.sev.co/docs/microsoft-sentinel-inventory) +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
 + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure and connect to Sevco** + +The Sevco Platform can integrate with and export assets directly to Microsoft Sentinel. + +1. Go to [Sevco - Microsoft Sentinel Integration](https://docs.sev.co/docs/microsoft-sentinel-inventory), and follow the instructions, using the parameters below to set up the connection: +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `Sevco_Devices_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/signl4.md b/Tools/Solutions Analyzer/connector-docs/solutions/signl4.md index e944d9ad6ea..b1ee2e798c8 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/signl4.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/signl4.md @@ -26,6 +26,56 @@ When critical systems fail or security incidents happen, SIGNL4 bridges the ‘l [Learn more >](https://www.signl4.com) + +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector is mainly configured on the SIGNL4 side. You can find a description video here: [**Integrate SIGNL4 with Microsoft Sentinel**](https://www.signl4.com/blog/portfolio_item/azure-sentinel-mobile-alert-notification-duty-schedule-escalation/). + +>**SIGNL4 Connector:** The SIGNL4 connector for Microsoft Sentinel, Azure Security Center and other Azure Graph Security API providers provides seamless 2-way integration with your Azure Security solutions. Once added to your SIGNL4 team, the connector will read security alerts from Azure Graph Security API and fully automatically and trigger alert notifications to your team members on duty. It will also synchronize the alert status from SIGNL4 to Graph Security API, so that if alerts are acknowledged or closed, this status is also updated on the according Azure Graph Security API alert or the corresponding security provider. As mentioned, the connector mainly uses Azure Graph Security API, but for some security providers, such as Microsoft Sentinel, it also uses dedicated REST APIs from according Azure solutions. + +**1. Microsoft Sentinel Features** + +Microsoft Sentinel is a cloud native SIEM solution from Microsoft and a security alert provider in Azure Graph Security API. However, the level of alert details available with the Graph Security API is limited for Microsoft Sentinel. The connector can therefore augment alerts with further details (insights rule search results), from the underlying Microsoft Sentinel Log Analytics workspace. To be able to do that, the connector communicates with Azure Log Analytics REST API and needs according permissions (see below). Furthermore, the app can also update the status of Microsoft Sentinel incidents, when all related security alerts are e.g. in progress or resolved. 
In order to be able to do that, the connector needs to be a member of the 'Microsoft Sentinel Contributors' group in your Azure Subscription.
+ **Automated deployment in Azure**
+ The credentials required to access the aforementioned APIs are generated by a small PowerShell script that you can download below. The script performs the following tasks for you:
+ - Logs you on to your Azure Subscription (please login with an administrator account)
+ - Creates a new enterprise application for this connector in your Azure AD, also referred to as service principal
+ - Creates a new role in your Azure IAM that grants read/query permission to only Azure Log Analytics workspaces.
+ - Joins the enterprise application to that user role
+ - Joins the enterprise application to the 'Microsoft Sentinel Contributors' role
+ - Outputs some data that you need to configure the app (see below)
+
+**2. Deployment procedure**
+
+1. Download the PowerShell deployment script from [here](https://github.com/signl4/signl4-integration-azuresentinel/blob/master/registerSIGNL4Client.ps1).
+2. Review the script and the roles and permission scopes it deploys for the new app registration. If you don't want to use the connector with Microsoft Sentinel, you could remove all role creation and role assignment code and only use it to create the app registration (SPN) in your Azure Active Directory.
+3. Run the script. At the end it outputs information that you need to enter in the connector app configuration.
+4. In Azure AD, click on 'App Registrations'. Find the app with the name 'SIGNL4AzureSecurity' and open its details
+5. On the left menu blade click 'API Permissions'. Then click 'Add a permission'.
+6. On the blade that loads, under 'Microsoft APIs' click on the 'Microsoft Graph' tile, then click 'App permission'.
+7. In the table that is displayed expand 'SecurityEvents' and check 'SecurityEvents.Read.All' and 'SecurityEvents.ReadWrite.All'.
+8. Click 'Add permissions'.
+
+**3. 
Configuring the SIGNL4 connector app** + +Finally, enter the IDs, that the script has outputted in the connector configuration: + - Azure Tenant ID + - Azure Subscription ID + - Client ID (of the enterprise application) + - Client Secret (of the enterprise application) + Once the app is enabled, it will start reading your Azure Graph Security API alerts. + +>**NOTE:** It will initially only read the alerts that have occurred within the last 24 hours. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `SIGNL4_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/silverfort.md b/Tools/Solutions Analyzer/connector-docs/solutions/silverfort.md index 37eed574317..a2b027e2931 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/silverfort.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/silverfort.md @@ -25,6 +25,59 @@ The [Silverfort](https://silverfort.com) ITDR Admin Console connector solution a Please contact Silverfort or consult the Silverfort documentation for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. 
You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/sinec-security-guard.md b/Tools/Solutions Analyzer/connector-docs/solutions/sinec-security-guard.md index 4fd8916b5ac..1bf9f4a7998 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/sinec-security-guard.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/sinec-security-guard.md @@ -21,6 +21,27 @@ This solution provides **1 data connector(s)**. The SINEC Security Guard solution for Microsoft Sentinel allows you to ingest security events of your industrial networks from the [SINEC Security Guard](https://siemens.com/sinec-security-guard) into Microsoft Sentinel +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +This Data Connector relies on the SINEC Security Guard Sensor Package to be able to receive Sensor events in Microsoft Sentinel. 
The Sensor Package can be purchased in the Siemens Xcelerator Marketplace. +**1. Please follow the steps to configure the data connector** + +**Set up the SINEC Security Guard Sensor** + + Detailed step for setting up the sensor. + + **Create the Data Connector and configure it in the SINEC Security Guard web interface** + + Instructions on configuring the data connector. + | | | |--------------------------|---| | **Tables Ingested** | `SINECSecurityGuard_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/slackaudit.md b/Tools/Solutions Analyzer/connector-docs/solutions/slackaudit.md index 33512b9c1ab..1c5b5472e7c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/slackaudit.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/slackaudit.md @@ -29,6 +29,48 @@ This solution provides **3 data connector(s)**. The SlackAudit data connector provides the capability to ingest [Slack Audit logs](https://api.slack.com/admins/audit-logs) into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs-call) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **UserName, SlackAudit API Key & Action Type**: To Generate the Access Token, create a new application in Slack, then add necessary scopes and configure the redirect URL. For detailed instructions on generating the access token, user name and action name limit, refer the [link](https://github.com/v-gsrihitha/v-gsrihitha/blob/main/SlackAudit/Readme.md). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Connect SlackAudit to Microsoft Sentinel** + +To ingest data from SlackAudit to Microsoft Sentinel, you have to click on Add Domain button below then you get a pop up to fill the details, provide the required information and click on Connect. You can see the usernames, actions connected in the grid. +> +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **UserName** +- **Actions** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add domain** + +*Add domain* + +When you click the "Add domain" button in the portal, a configuration form will open. You'll need to provide: + +- **UserName** (optional): Enter your User Name +- **SlackAudit API Key** (optional): Enter your API KEY +- **SlackAudit Action Type** (optional): Enter the Action Type + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `SlackAuditV2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/snowflake.md b/Tools/Solutions Analyzer/connector-docs/solutions/snowflake.md index 98a88895025..2a8684ec920 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/snowflake.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/snowflake.md @@ -25,6 +25,48 @@ This solution provides **2 data connector(s)**. 
The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Snowflake to Microsoft Sentinel** +>**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function +To gather data from Snowflake, you need to provide the following resources +#### 1. Account Identifier + To gather data from Snowflake, you'll need Snowflake Account Identifier. +#### 2. 
Programmatic Access Token + To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token +For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md). +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Account-Identifier** +- **Table Name** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. + +**Add Account** + +*Add Account* + +When you click the "Add Account" button in the portal, a configuration form will open. You'll need to provide: + +- **Snowflake Account Identifier** (required): Enter Snowflake Account Identifier +- **Snowflake PAT** (required): Enter Snowflake PAT + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. 
+ | | | |--------------------------|---| | **Tables Ingested** | `SnowflakeLoad_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/soc-prime-ccf.md b/Tools/Solutions Analyzer/connector-docs/solutions/soc-prime-ccf.md new file mode 100644 index 00000000000..0cc50dc1402 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/solutions/soc-prime-ccf.md @@ -0,0 +1,60 @@ +# SOC Prime CCF + +## Solution Information + +| | | +|------------------------|-------| +| **Publisher** | SOC Prime | +| **Support Tier** | Partner | +| **Support Link** | [https://socprime.com/](https://socprime.com/) | +| **Categories** | domains | +| **First Published** | 2025-09-25 | +| **Solution Folder** | [https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC%20Prime%20CCF](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC%20Prime%20CCF) | + +## Data Connectors + +This solution provides **1 data connector(s)**. + +### [SOC Prime Platform Audit Logs Data Connector](../connectors/socprimeauditlogsdataconnector.md) + +**Publisher:** Microsoft + +The [SOC Prime Audit Logs](https://help.socprime.com/en/articles/6265791-api) data connector allows ingesting logs from the SOC Prime Platform API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SOC Prime Platform API to fetch SOC Prime platform audit logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table, thus resulting in better performance. + +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. 
Please verify all configuration steps in the Microsoft Sentinel portal. + +#### Configuration steps for the SOC Prime Platform API + Follow the instructions to obtain the credentials. you can also follow this [guide](https://help.socprime.com/en/articles/6265791-api#h_8a0d20b204) to generate personal API key. +#### Retrieve API Key + 1. Log in to the SOC Prime Platform + 2. Click [**Account**] icon -> [**Platform Settings**] -> [**API**] + 3. Click [**Add New Key**] + 4. In the modal that appears give your key a meaningful name, set expiration date and product APIs the key provides access to + 5. Click on [**Generate**] + 6. Copy the key and save it in a safe place. You won't be able to view it again once you close this modal +- **SOC Prime API Key**: (password field) +- Click 'Connect' to establish connection + +| | | +|--------------------------|---| +| **Tables Ingested** | `SOCPrimeAuditLogs_CL` | +| **Connector Definition Files** | [SOCPrime_DataConnectorDefinition.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC%20Prime%20CCF/Data%20Connectors/SOCPrime_ccp/SOCPrime_DataConnectorDefinition.json) | + +[→ View full connector details](../connectors/socprimeauditlogsdataconnector.md) + +## Tables Reference + +This solution ingests data into **1 table(s)**: + +| Table | Used By Connectors | +|-------|-------------------| +| `SOCPrimeAuditLogs_CL` | [SOC Prime Platform Audit Logs Data Connector](../connectors/socprimeauditlogsdataconnector.md) | + +[← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/sonicwall-firewall.md b/Tools/Solutions Analyzer/connector-docs/solutions/sonicwall-firewall.md index 983d7310dca..b929fe186ec 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/sonicwall-firewall.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/sonicwall-firewall.md @@ -25,6 +25,62 @@ This solution provides **2 data connector(s)**. 
Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by SonicWall to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. 
Forward SonicWall Firewall Common Event Format (CEF) logs to Syslog agent** + + Set your SonicWall Firewall to send Syslog messages in CEF format to the proxy machine. Make sure you send the logs to port 514 TCP on the machine's IP address. + + Follow Instructions . Then Make sure you select local use 4 as the facility. Then select ArcSight as the Syslog format. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/sonraisecurity.md b/Tools/Solutions Analyzer/connector-docs/solutions/sonraisecurity.md index 87939e40f1b..85c347e6974 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/sonraisecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/sonraisecurity.md @@ -20,6 +20,27 @@ This solution provides **1 data connector(s)**. Use this data connector to integrate with Sonrai Security and get Sonrai tickets sent directly to Microsoft Sentinel. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Sonrai Security Data Connector** + +1. Navigate to Sonrai Security dashboard. +2. On the bottom left panel, click on integrations. +3. Select Microsoft Sentinel from the list of available Integrations. +4. Fill in the form using the information provided below. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `Sonrai_Tickets_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/sophos-cloud-optix.md b/Tools/Solutions Analyzer/connector-docs/solutions/sophos-cloud-optix.md index 62760daec51..a381ebd5a7a 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/sophos-cloud-optix.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/sophos-cloud-optix.md @@ -21,6 +21,36 @@ This solution provides **1 data connector(s)**. The [Sophos Cloud Optix](https://www.sophos.com/products/cloud-optix.aspx) connector allows you to easily connect your Sophos Cloud Optix logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's cloud security and compliance posture and improves your cloud security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Get the Workspace ID and the Primary Key** + +Copy the Workspace ID and Primary Key for your workspace. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. Configure the Sophos Cloud Optix Integration** + +In Sophos Cloud Optix go to [Settings->Integrations->Microsoft Sentinel](https://optix.sophos.com/#/integrations/sentinel) and enter the Workspace ID and Primary Key copied in Step 1. + +**3. Select Alert Levels** + +In Alert Levels, select which Sophos Cloud Optix alerts you want to send to Microsoft Sentinel. + +**4. Turn on the integration** + +To turn on the integration, select Enable, and then click Save. 
+ | | | |--------------------------|---| | **Tables Ingested** | `SophosCloudOptix_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/sophos-endpoint-protection.md b/Tools/Solutions Analyzer/connector-docs/solutions/sophos-endpoint-protection.md index c3193ac0d3d..481aa52ff6d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/sophos-endpoint-protection.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/sophos-endpoint-protection.md @@ -25,6 +25,41 @@ This solution provides **2 data connector(s)**. The [Sophos Endpoint Protection](https://www.sophos.com/en-us/products/endpoint-antivirus.aspx) data connector provides the capability to ingest [Sophos events](https://developer.sophos.com/docs/siem-v1/1/routes/events/get) and [Sophos alerts](https://developer.sophos.com/docs/siem-v1/1/routes/alerts/get) into Microsoft Sentinel. Refer to [Sophos Central Admin documentation](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/Logs.html) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Sophos Endpoint Protection API access**: Access to the Sophos Endpoint Protection API through a service principal is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect to Sophos Endpoint Protection API to start collecting event and alert logs in Microsoft Sentinel** + +Follow [Sophos instructions](https://developer.sophos.com/getting-started-tenant) to create a service principal with access to the Sophos API. It will need the Service Principal ReadOnly role. + Through those instructions, you should get the Client ID, Client Secret, Tenant ID and data region. 
+   Fill the form below with that information.
+- **Sophos Tenant ID**: Sophos Tenant ID
+- **Sophos Tenant Data Region**: eu01, eu02, us01, us02 or us03
+- **OAuth Configuration**:
+  - Client ID
+  - Client Secret
+  - Click 'Connect' to authenticate
+**Connector Management Interface**
+
+This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors.
+
+📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information:
+- **Name**
+- **ID**
+
+➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below).
+
+> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation.
+
 | | |
 |--------------------------|---|
 | **Tables Ingested** | `SophosEPAlerts_CL` |
diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/sophos-xg-firewall.md b/Tools/Solutions Analyzer/connector-docs/solutions/sophos-xg-firewall.md
index 1cb39182687..bf9531c358c 100644
--- a/Tools/Solutions Analyzer/connector-docs/solutions/sophos-xg-firewall.md
+++ b/Tools/Solutions Analyzer/connector-docs/solutions/sophos-xg-firewall.md
@@ -21,6 +21,49 @@ This solution provides **1 data connector(s)**.
 
 The [Sophos XG Firewall](https://www.sophos.com/products/next-gen-firewall.aspx) allows you to easily connect your Sophos XG Firewall logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Sophos XG Firewall with Microsoft Sentinel provides more visibility into your organization's firewall traffic and will enhance security monitoring capabilities.
 
+**Permissions:**
+
+**Resource Provider Permissions:**
+- **Workspace** (Workspace): write permission is required.
+ +**Custom Permissions:** +- **Sophos XG Firewall**: must be configured to export logs via Syslog + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Sophos XG Firewall and load the function code or click [here](https://aka.ms/sentinel-SophosXG-parser), on the second line of the query, enter the hostname(s) of your Sophos XG Firewall device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. 
Configure and connect the Sophos XG Firewall** + +[Follow these instructions](https://doc.sophos.com/nsg/sophos-firewall/20.0/Help/en-us/webhelp/onlinehelp/AdministratorHelp/SystemServices/LogSettings/SyslogServerAdd/index.html) to enable syslog streaming. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/squadra-technologies-secrmm.md b/Tools/Solutions Analyzer/connector-docs/solutions/squadra-technologies-secrmm.md index c9d3ad352f8..db3b540e20f 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/squadra-technologies-secrmm.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/squadra-technologies-secrmm.md @@ -21,6 +21,22 @@ This solution provides **1 data connector(s)**. Use the Squadra Technologies secRMM Data Connector to push USB removable storage security event data into Microsoft Sentinel Log Analytics. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +Follow the step-by-step instructions provided in the [Squadra Technologies configuration guide for Azure Sentinel](https://www.squadratechnologies.com/StaticContent/ProductDownload/secRMM/9.11.0.0/secRMMAzureSentinelAdministratorGuide.pdf) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `secRMM_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/squidproxy.md b/Tools/Solutions Analyzer/connector-docs/solutions/squidproxy.md index ba1bf77c5d9..9203107b3e4 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/squidproxy.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/squidproxy.md @@ -21,6 +21,62 @@ This solution provides **1 data connector(s)**. The [Squid Proxy](http://www.squid-cache.org/) connector allows you to easily connect your Squid Proxy logs with Microsoft Sentinel. This gives you more insight into your organization's network proxy traffic and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Squid Proxy and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SquidProxy/Parsers/SquidProxy.txt), on the second line of the query, enter the hostname(s) of your SquidProxy device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Squid Proxy server where the logs are generated. + +> Logs from Squid Proxy deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. From the left pane, select **Data**, select **Custom Logs** and click **Add+** +3. 
Click **Browse** to upload a sample of a Squid Proxy log file(e.g. access.log or cache.log). Then, click **Next >** +4. Select **New line** as the record delimiter and click **Next >** +5. Select **Windows** or **Linux** and enter the path to Squid Proxy logs. Default paths are: + - **Windows** directory: `C:\Squid\var\log\squid\*.log` + - **Linux** Directory: `/var/log/squid/*.log` +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **SquidProxy_CL** as the custom log Name and click **Done** + | | | |--------------------------|---| | **Tables Ingested** | `SquidProxy_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/styx-intelligence.md b/Tools/Solutions Analyzer/connector-docs/solutions/styx-intelligence.md index b2ba0e429c6..b2843aef795 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/styx-intelligence.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/styx-intelligence.md @@ -21,6 +21,24 @@ This solution provides **1 data connector(s)**. The [StyxView Alerts](https://styxintel.com/) data connector enables seamless integration between the StyxView Alerts platform and Microsoft Sentinel. This connector ingests alert data from the StyxView Alerts API, allowing organizations to centralize and correlate actionable threat intelligence directly within their Microsoft Sentinel workspace. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **StyxView Alert API access**: Access to the StyxView Alerts API through an API key is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Connect to StyxView Alerts API to start collecting alert logs in Microsoft Sentinel** + +Contact Styx Intelligence Support (support.team@styxintel.com) to get access to an API key. +- **API Token**: (password field) +- Click 'Connect' to establish connection + | | | |--------------------------|---| | **Tables Ingested** | `StyxViewAlerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/symantec-endpoint-protection.md b/Tools/Solutions Analyzer/connector-docs/solutions/symantec-endpoint-protection.md index f7fc27532eb..8813b478732 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/symantec-endpoint-protection.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/symantec-endpoint-protection.md @@ -21,6 +21,49 @@ This solution provides **1 data connector(s)**. The [Broadcom Symantec Endpoint Protection (SEP)](https://www.broadcom.com/products/cyber-security/endpoint/end-user/enterprise) connector allows you to easily connect your SEP logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **Symantec Endpoint Protection (SEP)**: must be configured to export logs via Syslog + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Symantec Endpoint Protection and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Endpoint%20Protection/Parsers/SymantecEndpointProtection.yaml), on the second line of the query, enter the hostname(s) of your SymantecEndpointProtection device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the Symantec Endpoint Protection** + +[Follow these instructions](https://techdocs.broadcom.com/us/en/symantec-security-software/endpoint-security-and-management/endpoint-protection/all/Monitoring-Reporting-and-Enforcing-Compliance/viewing-logs-v7522439-d37e464/exporting-data-to-a-syslog-server-v8442743-d15e1107.html) to configure the Symantec Endpoint Protection to forward syslog. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/symantec-integrated-cyber-defense.md b/Tools/Solutions Analyzer/connector-docs/solutions/symantec-integrated-cyber-defense.md index 73ecc5805a3..aae7b430257 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/symantec-integrated-cyber-defense.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/symantec-integrated-cyber-defense.md @@ -13,27 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Symantec Integrated Cyber Defense Exchange](../connectors/symantec.md) - -**Publisher:** Symantec - -Symantec ICDx connector allows you to easily connect your Symantec security solutions logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities. - -| | | -|--------------------------|---| -| **Tables Ingested** | `SymantecICDx_CL` | -| **Connector Definition Files** | [SymantecICDX.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Integrated%20Cyber%20Defense/Data%20Connectors/SymantecICDX.JSON) | - -[→ View full connector details](../connectors/symantec.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `SymantecICDx_CL` | [Symantec Integrated Cyber Defense Exchange](../connectors/symantec.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. 
[← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/symantec-vip.md b/Tools/Solutions Analyzer/connector-docs/solutions/symantec-vip.md index a86073f4e2a..2af558ca07a 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/symantec-vip.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/symantec-vip.md @@ -21,6 +21,49 @@ This solution provides **1 data connector(s)**. The [Symantec VIP](https://vip.symantec.com/) connector allows you to easily connect your Symantec VIP logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **Symantec VIP**: must be configured to export logs via Syslog + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Symantec VIP and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20VIP/Parsers/SymantecVIP.yaml), on the second line of the query, enter the hostname(s) of your Symantec VIP device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. 
Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the Symantec VIP** + +[Follow these instructions](https://aka.ms/sentinel-symantecvip-configurationsteps) to configure the Symantec VIP Enterprise Gateway to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/symantecproxysg.md b/Tools/Solutions Analyzer/connector-docs/solutions/symantecproxysg.md index 971d7a6d53d..a69130655aa 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/symantecproxysg.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/symantecproxysg.md @@ -21,6 +21,57 @@ This solution provides **1 data connector(s)**. 
The [Symantec ProxySG](https://www.broadcom.com/products/cyber-security/network/gateway/proxy-sg-and-advanced-secure-gateway) allows you to easily connect your Symantec ProxySG logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Symantec ProxySG with Microsoft Sentinel provides more visibility into your organization's network proxy traffic and will enhance security monitoring capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **Symantec ProxySG**: must be configured to export logs via Syslog + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Symantec Proxy SG and load the function code or click [here](https://aka.ms/sentinel-SymantecProxySG-parser), on the second line of the query, enter the hostname(s) of your Symantec Proxy SG device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. 
+ - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the Symantec ProxySG** + +1. Log in to the Blue Coat Management Console . + 2. Select Configuration > Access Logging > Formats. + 3. Select New. + 4. Enter a unique name in the Format Name field. + 5. Click the radio button for **Custom format string** and paste the following string into the field. +

1 $(date) $(time) $(time-taken) $(c-ip) $(cs-userdn) $(cs-auth-groups) $(x-exception-id) $(sc-filter-result) $(cs-categories) $(quot)$(cs(Referer))$(quot) $(sc-status) $(s-action) $(cs-method) $(quot)$(rs(Content-Type))$(quot) $(cs-uri-scheme) $(cs-host) $(cs-uri-port) $(cs-uri-path) $(cs-uri-query) $(cs-uri-extension) $(quot)$(cs(User-Agent))$(quot) $(s-ip) $(sr-bytes) $(rs-bytes) $(x-virus-id) $(x-bluecoat-application-name) $(x-bluecoat-application-operation) $(cs-uri-port) $(x-cs-client-ip-country) $(cs-threat-risk)

+ 6. Click the **OK** button. + 7. Click the **Apply** button. + 8. [Follow these instructions](https://knowledge.broadcom.com/external/article/166529/sending-access-logs-to-a-syslog-server.html) to enable syslog streaming of **Access** Logs. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/syslog.md b/Tools/Solutions Analyzer/connector-docs/solutions/syslog.md index 20ba95122fa..2a33538b865 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/syslog.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/syslog.md @@ -29,6 +29,30 @@ Syslog is an event logging protocol that is common to Linux. Applications will s [Learn more >](https://aka.ms/sysLogInfo) +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace data sources** (Workspace): read and write permissions. + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Enable data collection rule​** + +You can collect Syslog events from your local machine by installing the agent on it. You can also collect Syslog generated on a different source by running the installation script below on the local machine, where the agent is installed. + +> Syslog logs are collected only from **Linux** agents. +- Configure SysLogAma data connector + +- **Create data collection rule** + +**2. Run the following command to install and apply the Syslog collector:** + +> To collect logs generated on a different machine run this script on the machine where the agent is installed. 
+ | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/talon.md b/Tools/Solutions Analyzer/connector-docs/solutions/talon.md index 446fc453eb2..c516cde5378 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/talon.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/talon.md @@ -21,6 +21,22 @@ This solution provides **1 data connector(s)**. The Talon Security Logs connector allows you to easily connect your Talon events and audit logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Please note the values below and follow the instructions here to connect your Talon Security events and audit logs with Microsoft Sentinel. 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `Talon_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/team-cymru-scout.md b/Tools/Solutions Analyzer/connector-docs/solutions/team-cymru-scout.md index 11a6ef17811..54d17cf4474 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/team-cymru-scout.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/team-cymru-scout.md @@ -22,6 +22,182 @@ This solution provides **1 data connector(s)**. The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Permission to assign a role to the registered application**: Permission to assign a role to the registered application in Microsoft Entra ID is required. +- **Team Cymru Scout Credentials/permissions**: Team Cymru Scout account credentials(Username, Password) is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**STEP 1 - Steps to Create Team Cymru Scout API Key** + + Follow these instructions to create a Team Cymru Scout API Key. + 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization. + +**STEP 2 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
+ +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 3 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 4 - Get Object ID of your application in Microsoft Entra ID** + + After creating your app registration, follow the steps in this section to get Object ID: + 1. Go to **Microsoft Entra ID**. + 2. Select **Enterprise applications** from the left menu. + 3. Find your newly created application in the list (you can search by the name you provided). + 4. Click on the application. + 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment. + +**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID** + + Follow the steps in this section to assign the role: + 1. 
In the Azure portal, Go to **Resource Group** and select your resource group. + 2. Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. Select **Contributor** as role and click on next. + 5. In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 6 - Upload csv with indicators in Watchlist** + + Follow the steps in this section to upload csv containing indicators in watchlist: + 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace. + 2. Go to **Watchlist** under **Configuration** section from left panel. + 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**. + 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**. + 5. Once validation is successful, click on **Update**. + 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. + +> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist) + +**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +> + +**8. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the TeamCymruScout data connector. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy) +2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Location + WorkspaceName + Function Name + TeamCymruScoutBaseURL + AuthenticationType + Username + Password + APIKey + IPValues + DomainValues + APIType + AzureClientId + AzureClientSecret + TenantId + AzureEntraObjectId + IPTableName + DomainTableName + AccountUsageTableName + Schedule + AccountUsageSchedule + LogLevel +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**9. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX). + + e. **Select a runtime:** Choose Python 3.12 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. Add each of the following application settings individually, with their respective values (case-sensitive): + CymruScoutBaseURL + AuthenticationType + TeamCymruScoutUsername + TeamCymruScoutPassword + APIKey + IPValues + DomainValues + APIType + AZURE_CLIENT_ID + AZURE_CLIENT_SECRET + AZURE_TENANT_ID + IPTableName + DomainTableName + AccountUsageTableName + Schedule + AccountUsageSchedule + LogLevel + AZURE_DATA_COLLECTION_ENDPOINT + AZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES + AZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `Cymru_Scout_Account_Usage_Data_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/tenable-app.md b/Tools/Solutions Analyzer/connector-docs/solutions/tenable-app.md index 9d1d5e88f24..b44bf32304d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/tenable-app.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/tenable-app.md @@ -26,6 +26,193 @@ This solution provides **2 data connector(s)**. 
The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +>**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuration steps for TenableVM** + + [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. + +**STEP 2 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. 
Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 3 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 4 - Get Object ID of your application in Microsoft Entra ID** + + After creating your app registration, follow the steps in this section to get Object ID: + 1. Go to **Microsoft Entra ID**. + 2. Select **Enterprise applications** from the left menu. + 3. Find your newly created application in the list (you can search by the name you provided). + 4. Click on the application. + 5. 
On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment. + +**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App** + +**6. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. +3. Enter the below information : + + a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. + + b. **TenableAccessKey** - Enter Access key for using the Tenable API. + + c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. + + d. **AzureClientID** - Enter Azure Client ID. + + e. **AzureClientSecret** - Enter Azure Client Secret. + + f. **TenantID** - Enter Tenant ID got from above steps. + + g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. + + h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. + + i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. + + j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. + + k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. + + l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. 
Allowed Values: Info, Low, Medium, High, Critical. Default is Info. + + m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. + + n. **AssetTableName** - Enter name of the table used to store Asset Data logs. + + o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. + + p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. + + q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. + + r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**7. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. 
**Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX). + + e. **Select a runtime:** Choose Python 3.12. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + + a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. + + b. **TenableAccessKey** - Enter Access key for using the Tenable API. + + c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. + + d. **AzureClientID** - Enter Azure Client ID. + + e. **AzureClientSecret** - Enter Azure Client Secret. + + f. **TenantID** - Enter Tenant ID got from above steps. + + g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. + + h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. + + i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. + + j. 
**WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. + + k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. + + l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. + + m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. + + n. **AssetTableName** - Enter name of the table used to store Asset Data logs. + + o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. + + p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. + + q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. + + r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. + + s. **PyTenableUAVendor** - Value must be set to **Microsoft**. + + t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. + + u. **PyTenableUABuild** - Value must be set to **0.0.1**. +3. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `Tenable_VM_Asset_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/tenablead.md b/Tools/Solutions Analyzer/connector-docs/solutions/tenablead.md index 3ff47457b7a..77baa5c93b9 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/tenablead.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/tenablead.md @@ -20,6 +20,96 @@ Tenable.ad connector allows to export Tenable.ad Indicators of Exposures, trailf It provides a data parser to manipulate the logs more easily. The different workbooks ease your Active Directory monitoring and provide different ways to visualize the data. 
The analytic templates allow to automate responses regarding different events, exposures, or attacks. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Access to Tenable.ad Configuration**: Permissions to configure syslog alerting engine + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://raw.githubusercontent.com/tenable/Azure-Sentinel/Tenable.ad-connector/Solutions/TenableAD/Parsers/afad_parser.kql) to create the Kusto Functions alias, **afad_parser** + +**1. Configure the Syslog server** + +You will first need a **linux Syslog** server that Tenable.ad will send logs to. Typically you can run **rsyslog** on **Ubuntu**. + You can then configure this server as you wish, but it is recommended to be able to output Tenable.ad logs in a separate file. 
+ +Configure rsyslog to accept logs from your Tenable.ad IP address.: + +```shell +sudo -i + +# Set Tenable.ad source IP address +export TENABLE_AD_IP={Enter your IP address} + +# Create rsyslog configuration file +cat > /etc/rsyslog.d/80-tenable.conf << EOF +\$ModLoad imudp +\$UDPServerRun 514 +\$ModLoad imtcp +\$InputTCPServerRun 514 +\$AllowedSender TCP, 127.0.0.1, $TENABLE_AD_IP +\$AllowedSender UDP, 127.0.0.1, $TENABLE_AD_IP +\$template MsgTemplate,"%TIMESTAMP:::date-rfc3339% %HOSTNAME% %programname%[%procid%]:%msg%\n" +\$template remote-incoming-logs, "/var/log/%PROGRAMNAME%.log" +*.* ?remote-incoming-logs;MsgTemplate +EOF + +# Restart rsyslog +systemctl restart rsyslog +``` + +**2. Install and onboard the Microsoft agent for Linux** + +The OMS agent will receive the Tenable.ad syslog events and publish it in Sentinel : +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**3. Check agent logs on the Syslog server** + +```shell +tail -f /var/opt/microsoft/omsagent/log/omsagent.log +``` + +**4. Configure Tenable.ad to send logs to your Syslog server** + +On your **Tenable.ad** portal, go to *System*, *Configuration* and then *Syslog*. +From there you can create a new Syslog alert toward your Syslog server. + +Once this is done, check that the logs are correctly gathered on your server in a separate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in Tenable.ad). +If you used the Quickstart template, the Syslog server will by default listen on port 514 in UDP and 1514 in TCP, without TLS. + +**5. Configure the custom logs** + +Configure the agent to collect the logs. + +1. 
In Sentinel, go to **Configuration** -> **Settings** -> **Workspace settings** -> **Custom logs**. +2. Click **Add custom log**. +3. Upload a sample Tenable.ad.log Syslog file from the **Linux** machine running the **Syslog** server and click **Next** +4. Set the record delimiter to **New Line** if not already the case and click **Next**. +5. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**. The default location of the file is `/var/log/Tenable.ad.log` if you have a Tenable version <3.1.0, you must also add this linux file location `/var/log/AlsidForAD.log`. +6. Set the **Name** to *Tenable_ad_CL* (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *Tenable_ad_CL_CL*). +7. Click **Next**, you will see a summary, then click **Create** + +**6. Enjoy!** + +> You should now be able to receive logs in the *Tenable_ad_CL* table, logs data can be parsed using the **afad_parser()** function, used by all query samples, workbooks and analytic templates. + | | | |--------------------------|---| | **Tables Ingested** | `Tenable_ad_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/tenableio.md b/Tools/Solutions Analyzer/connector-docs/solutions/tenableio.md index b640f9f4220..89648075930 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/tenableio.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/tenableio.md @@ -21,6 +21,94 @@ This solution provides **1 data connector(s)**. The [Tenable.io](https://www.tenable.com/products/tenable-io) data connector provides the capability to ingest Asset and Vulnerability data into Microsoft Sentinel through the REST API from the Tenable.io platform (Managed in the cloud). Refer to [API documentation](https://developer.tenable.com/reference) for more information. 
The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). Check all [requirements and follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) for obtaining credentials. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Durable Functions to connect to the Tenable.io API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk) and [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a [**Tenable.io parser for vulnerabilities**](https://aka.ms/sentinel-TenableIO-TenableIOVulnerabilities-parser) and a [**Tenable.io parser for assets**](https://aka.ms/sentinel-TenableIO-TenableIOAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuration steps for Tenable.io** + + [Follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) to obtain the required API credentials. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App** + +>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Tenable.io Vulnerability Management Report data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableIO-azuredeploy) +2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **TenableAccessKey** and **TenableSecretKey** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Tenable.io Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-TenableIO-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableIOXXXXX). + + e. **Select a runtime:** Choose Python 3.8. + + f. Select a location for new resources. 
For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + TenableAccessKey + TenableSecretKey + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://<CustomerId>.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `Tenable_IO_Assets_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/thehive.md b/Tools/Solutions Analyzer/connector-docs/solutions/thehive.md index d56deb4fe53..def6440b41d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/thehive.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/thehive.md @@ -21,6 +21,80 @@ This solution provides **1 data connector(s)**. The [TheHive](http://thehive-project.org/) data connector provides the capability to ingest common TheHive events into Microsoft Sentinel through Webhooks. TheHive can notify external system of modification events (case creation, alert update, task assignment) in real time. When a change occurs in the TheHive, an HTTPS POST request with event information is sent to a callback data connector URL. 
Refer to [Webhooks documentation](https://docs.thehive-project.org/thehive/legacy/thehive3/admin/webhooks/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Webhooks Credentials/permissions**: **TheHiveBearerToken**, **Callback URL** are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://docs.thehive-project.org/thehive/installation-and-configuration/configuration/webhooks/). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**TheHive**](https://aka.ms/sentinel-TheHive-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuration steps for the TheHive** + + Follow the [instructions](https://docs.thehive-project.org/thehive/installation-and-configuration/configuration/webhooks/) to configure Webhooks. + +1. Authentication method is *Bearer Auth*. +2. Generate the **TheHiveBearerToken** according to your password policy. +3. Setup Webhook notifications in the *application.conf* file including **TheHiveBearerToken** parameter. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the TheHive data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the TheHive data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TheHive-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. 
Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **TheHiveBearerToken** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. +6. After deploying, open the Function App page, select your app, go to **Functions**, click **Get Function Url**, copy it, and follow p.7 from STEP 1. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the TheHive data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-TheHive-functionapp) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. Go to Azure Portal for the Function App configuration. +2. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + TheHiveBearerToken + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `TheHive_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/theom.md b/Tools/Solutions Analyzer/connector-docs/solutions/theom.md index c6dd2068713..d65f784e5ae 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/theom.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/theom.md @@ -21,6 +21,27 @@ This solution provides **1 data connector(s)**. Theom Data Connector enables organizations to connect their Theom environment to Microsoft Sentinel. This solution enables users to receive alerts on data security risks, create and enrich incidents, check statistics and trigger SOAR playbooks in Microsoft Sentinel +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +1. In **Theom UI Console** click on **Manage -> Alerts** on the side bar. +2. Select **Sentinel** tab. +3. Click on **Active** button to enable the configuration. +4. Enter `Primary` key as `Authorization Token` +5. Enter `Endpoint URL` as `https://.ods.opinsights.azure.com/api/logs?api-version=2016-04-01` +6. 
Click on `SAVE SETTINGS` +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `TheomAlerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/threat-intelligence-(new).md b/Tools/Solutions Analyzer/connector-docs/solutions/threat-intelligence-(new).md index 345525aae86..27a3c79b0e9 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/threat-intelligence-(new).md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/threat-intelligence-(new).md @@ -41,12 +41,56 @@ This solution provides **6 data connector(s)**. Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permissions are required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. You can connect your threat intelligence data sources to Microsoft Sentinel by either:** + +>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. 
+ +>Calling the Microsoft Sentinel data plane API directly from another application. + - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call. + +**2. Follow These Steps to Connect to your Threat Intelligence:** + +**1. Get Microsoft Entra ID Access Token** + +To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token + - Notice: Please request Microsoft Entra ID access token with scope value: +Fairfax: https://management.usgovcloudapi.net/.default +Mooncake: https://management.chinacloudapi.cn/.default + +**2. Send STIX objects to Sentinel** + +You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). + +>HTTP method: POST + +>Endpoint: +Fairfax: https://api.ti.sentinel.azure.us/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview +Mooncake: https://api.ti.sentinel.azure.cn/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview + +>WorkspaceID: the workspace that the STIX objects are uploaded to. + + +>Header Value 1: "Authorization" = "Bearer [Microsoft Entra ID Access Token from step 1]" + + +> Header Value 2: "Content-Type" = "application/json" + +>Body: The body is a JSON object containing an array of STIX objects. 
+ | | | |--------------------------|---| | **Tables Ingested** | `ThreatIntelIndicators` | | | `ThreatIntelObjects` | -| **Connector Definition Files** | [template_ThreatIntelligenceUploadIndicators.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators.json) | -| | [template_ThreatIntelligenceUploadIndicators_ForGov.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json) | +| **Connector Definition Files** | [template_ThreatIntelligenceUploadIndicators_ForGov.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json) | [→ View full connector details](../connectors/threatintelligenceuploadindicatorsapi.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/threat-intelligence.md b/Tools/Solutions Analyzer/connector-docs/solutions/threat-intelligence.md index 0b7c8371b10..7c144c47b2c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/threat-intelligence.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/threat-intelligence.md @@ -37,11 +37,55 @@ This solution provides **5 data connector(s)**. Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permissions are required. 
+ +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. You can connect your threat intelligence data sources to Microsoft Sentinel by either:** + +>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. + +>Calling the Microsoft Sentinel data plane API directly from another application. + - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call. + +**2. Follow These Steps to Connect to your Threat Intelligence:** + +**1. Get Microsoft Entra ID Access Token** + +To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token + - Notice: Please request Microsoft Entra ID access token with scope value: +Fairfax: https://management.usgovcloudapi.net/.default +Mooncake: https://management.chinacloudapi.cn/.default + +**2. Send STIX objects to Sentinel** + +You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). + +>HTTP method: POST + +>Endpoint: +Fairfax: https://api.ti.sentinel.azure.us/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview +Mooncake: https://api.ti.sentinel.azure.cn/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview + +>WorkspaceID: the workspace that the STIX objects are uploaded to. 
+ + +>Header Value 1: "Authorization" = "Bearer [Microsoft Entra ID Access Token from step 1]" + + +> Header Value 2: "Content-Type" = "application/json" + +>Body: The body is a JSON object containing an array of STIX objects. + | | | |--------------------------|---| | **Tables Ingested** | `ThreatIntelligenceIndicator` | -| **Connector Definition Files** | [template_ThreatIntelligenceUploadIndicators.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceUploadIndicators.json) | -| | [template_ThreatIntelligenceUploadIndicators_ForGov.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json) | +| **Connector Definition Files** | [template_ThreatIntelligenceUploadIndicators_ForGov.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json) | [→ View full connector details](../connectors/threatintelligenceuploadindicatorsapi.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/tomcat.md b/Tools/Solutions Analyzer/connector-docs/solutions/tomcat.md index 60981008044..0b67665c82b 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/tomcat.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/tomcat.md @@ -21,6 +21,63 @@ This solution provides **1 data connector(s)**. The Apache Tomcat solution provides the capability to ingest [Apache Tomcat](http://tomcat.apache.org/) events into Microsoft Sentinel. Refer to [Apache Tomcat documentation](http://tomcat.apache.org/tomcat-10.0-doc/logging.html) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias TomcatEvent and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tomcat/Parsers/TomcatEvent.txt).The function usually takes 10-15 minutes to activate after solution installation/update. + +>**NOTE:** This data connector has been developed using Apache Tomcat version 10.0.4 + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Apache Tomcat Server where the logs are generated. + +> Logs from Apache Tomcat Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. 
+ - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Configure the custom log directory to be collected +- **Open custom logs settings** + +1. Select the link above to open your workspace advanced settings +2. From the left pane, select **Data**, select **Custom Logs** and click **Add+** +3. Click **Browse** to upload a sample of a Tomcat log file (e.g. access.log or error.log). Then, click **Next >** +4. Select **New line** as the record delimiter and click **Next >** +5. Select **Windows** or **Linux** and enter the path to Tomcat logs based on your configuration. Example: + - **Linux** Directory: '/var/log/tomcat/*.log' +6. After entering the path, click the '+' symbol to apply, then click **Next >** +7. Add **Tomcat_CL** as the custom log Name and click **Done** + | | | |--------------------------|---| | **Tables Ingested** | `Tomcat_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/transmitsecurity.md b/Tools/Solutions Analyzer/connector-docs/solutions/transmitsecurity.md index 2a5cfd429ad..833e189efb6 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/transmitsecurity.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/transmitsecurity.md @@ -14,27 +14,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Transmit Security Connector](../connectors/transmitsecurity.md) - -**Publisher:** TransmitSecurity - -The [Transmit Security] data connector provides the capability to ingest common Transmit Security API events into Microsoft Sentinel through the REST API. [Refer to API documentation for more information](https://developer.transmitsecurity.com/). 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. - -| | | -|--------------------------|---| -| **Tables Ingested** | `TransmitSecurityActivity_CL` | -| **Connector Definition Files** | [TransmitSecurity_API_FunctionApp.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TransmitSecurity/Data%20Connectors/TransmitSecurity_API_FunctionApp.JSON) | - -[→ View full connector details](../connectors/transmitsecurity.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `TransmitSecurityActivity_CL` | [Transmit Security Connector](../connectors/transmitsecurity.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-apex-one.md b/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-apex-one.md index 1e0cccb19bb..7b9f23f1a7d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-apex-one.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-apex-one.md @@ -26,6 +26,61 @@ This solution provides **2 data connector(s)**. The [Trend Micro Apex One](https://www.trendmicro.com/en_us/business/products/user-protection/sps/endpoint.html) data connector provides the capability to ingest [Trend Micro Apex One events](https://aka.ms/sentinel-TrendMicroApex-OneEvents) into Microsoft Sentinel. Refer to [Trend Micro Apex Central](https://aka.ms/sentinel-TrendMicroApex-OneCentral) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>This data connector depends on a parser based on a Kusto Function to work as expected [**TMApexOneEvent**](https://aka.ms/sentinel-TMApexOneEvent-parser) which is deployed with the Microsoft Sentinel Solution. +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + [Follow these steps](https://docs.trendmicro.com/en-us/enterprise/trend-micro-apex-central-2019-online-help/detections/logs_001/syslog-forwarding.aspx) to configure Apex Central sending alerts via syslog. While configuring, on step 6, select the log format **CEF**. + + **Step C. 
Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-cloud-app-security.md b/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-cloud-app-security.md index 53c459594d9..bdf49aa11ab 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-cloud-app-security.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-cloud-app-security.md @@ -21,6 +21,98 @@ This solution provides **1 data connector(s)**. The [Trend Micro Cloud App Security](https://www.trendmicro.com/en_be/business/products/user-protection/sps/email-and-collaboration/cloud-app-security.html) data connector provides the capability to retrieve security event logs of the services that Cloud App Security protects and more events into Microsoft Sentinel through the Log Retrieval API. 
Refer to API [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/supported-cloud-app-/log-retrieval-api/get-security-logs.aspx) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **TrendMicroCASToken** and **TrendMicroCASServiceURL** are required for making API calls. See the [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/getting-started-with/using-cloud-app-secu.aspx) to learn more about API. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**TrendMicroCAS**](https://aka.ms/sentinel-TrendMicroCAS-parser) which is deployed with the Microsoft Sentinel Solution. + +**STEP 1 - Configuration steps for the Trend Micro Log Retrieval API** + + Follow the instructions to obtain the credentials. + +1. Obtain the **TrendMicroCASToken** using the [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/getting-started-with/generating-an-authen.aspx). +2. Save the credentials for use in the data connector. + +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Trend Micro Cloud App Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Trend Micro Cloud App Security data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. 
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TrendMicroCAS-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **TrendMicroCASToken**, **TrendMicroCASServiceURL** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Trend Micro Cloud App Security data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-TMCASAPI-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TMCASXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + TrendMicroCASToken + TrendMicroCASServiceURL + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `TrendMicroCAS_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-deep-security.md b/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-deep-security.md index d8e10c024aa..14569ff3fe4 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-deep-security.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-deep-security.md @@ -21,6 +21,63 @@ This solution provides **1 data connector(s)**. 
The Trend Micro Deep Security connector allows you to easily connect your Deep Security logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's networks/systems and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. 
+ - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Trend Micro Deep Security logs to Syslog agent** + +1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address. +2. Forward Trend Micro Deep Security events to the Syslog agent. +3. Define a new Syslog Configuration that uses the CEF format by referencing [this knowledge article](https://aka.ms/Sentinel-trendmicro-kblink) for additional information. +4. Configure the Deep Security Manager to use this new configuration to forward events to the Syslog agent using [these instructions](https://aka.ms/Sentinel-trendMicro-connectorInstructions). +5. Make sure to save the [TrendMicroDeepSecurity](https://aka.ms/TrendMicroDeepSecurityFunction) function so that it queries the Trend Micro Deep Security data properly. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-tippingpoint.md b/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-tippingpoint.md index c0efceb5b9f..d1033ad935b 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-tippingpoint.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-tippingpoint.md @@ -21,6 +21,61 @@ This solution provides **1 data connector(s)**. The Trend Micro TippingPoint connector allows you to easily connect your TippingPoint SMS IPS events with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's networks/systems and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias TrendMicroTippingPoint and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20TippingPoint/Parsers/TrendMicroTippingPoint).The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Trend Micro TippingPoint SMS logs to Syslog agent** + +Set your TippingPoint SMS to send Syslog messages in ArcSight CEF Format v4.2 format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address. + +**3. 
Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-vision-one.md b/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-vision-one.md index 5b7b019bfe6..94580530882 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-vision-one.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/trend-micro-vision-one.md @@ -26,6 +26,50 @@ The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detect The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Trend Vision One API Token**: A Trend Vision One API Token is required. See the documentation to learn more about the [Trend Vision One API](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Trend Vision One API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. 
+ +**STEP 1 - Configuration steps for the Trend Vision One API** + + [Follow these instructions](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps) to create an account and an API authentication token. + +**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Trend Vision One connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Trend Vision One API Authorization Token, readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Azure Resource Manager (ARM) Template Deployment** + +This method provides an automated deployment of the Trend Vision One connector using an ARM Tempate. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-trendmicroxdr-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter a unique **Function Name**, **Workspace ID**, **Workspace Key**, **API Token** and **Region Code**. + - Note: Provide the appropriate region code based on where your Trend Vision One instance is deployed: us, eu, au, in, sg, jp + - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. 
+ | | | |--------------------------|---| | **Tables Ingested** | `TrendMicro_XDR_OAT_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/ubiquiti-unifi.md b/Tools/Solutions Analyzer/connector-docs/solutions/ubiquiti-unifi.md index 79a4625ae1d..f2be76d35d3 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/ubiquiti-unifi.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/ubiquiti-unifi.md @@ -21,6 +21,68 @@ This solution provides **1 data connector(s)**. The [Ubiquiti UniFi](https://www.ui.com/) data connector provides the capability to ingest [Ubiquiti UniFi firewall, dns, ssh, AP events](https://help.ui.com/hc/en-us/articles/204959834-UniFi-How-to-View-Log-Files) into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**UbiquitiAuditEvent**](https://aka.ms/sentinel-UbiquitiUnifi-parser) which is deployed with the Microsoft Sentinel Solution. + +>**NOTE:** This data connector has been developed using Enterprise System Controller Release Version: 5.6.2 (Syslog) + +**1. 
Install and onboard the agent for Linux or Windows** + +Install the agent on the Server to which the Ubiquiti logs are forwarder from Ubiquiti device (e.g.remote syslog server) + +> Logs from Ubiquiti Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Follow the configuration steps below to get Ubiquiti logs into Microsoft Sentinel. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps. +1. Configure log forwarding on your Ubiquiti controller: + + i. Go to Settings > System Setting > Controller Configuration > Remote Logging and enable the Syslog and Debugging (optional) logs (Refer to [User Guide](https://dl.ui.com/guides/UniFi/UniFi_Controller_V5_UG.pdf) for detailed instructions). +2. Download config file [Ubiquiti.conf](https://aka.ms/sentinel-UbiquitiUnifi-conf). +3. Login to the server where you have installed Azure Log Analytics agent. +4. Copy Ubiquiti.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. +5. Edit Ubiquiti.conf as follows: + + i. 
specify port which you have set your Ubiquiti device to forward logs to (line 4) + + ii. replace **workspace_id** with real value of your Workspace ID (lines 14,15,16,19) +5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command: + sudo /opt/microsoft/omsagent/bin/service_control restart +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `Ubiquiti_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/valence-security.md b/Tools/Solutions Analyzer/connector-docs/solutions/valence-security.md index b7702949d7c..df39249ae50 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/valence-security.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/valence-security.md @@ -21,6 +21,33 @@ This solution provides **1 data connector(s)**. Connects the Valence SaaS security platform Azure Log Analytics via the REST API interface. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Step 1 : Read the detailed documentation** + +The installation process is documented in great detail in [Valence Security's knowledge base](https://support.valencesecurity.com). The user should consult this documentation further to understand installation and debug of the integration. 
+ +**2. Step 2: Retrieve the workspace access credentials** + +The first installation step is to retrieve both your **Workspace ID** and **Primary Key** from the Microsoft Sentinel platform. +Copy the values shown below and save them for configuration of the API log forwarder integration. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Step 3: Configure Sentinel integration on the Valence Security Platform** + +As a Valence Security Platform admin, go to the [configuration screen](https://app.valencesecurity.com/settings/configuration), click Connect in the SIEM Integration card, and choose Microsoft Sentinel. Paste the values from the previous step and click Connect. Valence will test the connection so when success is reported, the connection worked. + | | | |--------------------------|---| | **Tables Ingested** | `ValenceAlert_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/varmour-application-controller.md b/Tools/Solutions Analyzer/connector-docs/solutions/varmour-application-controller.md index 0a015c24a42..e9514847760 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/varmour-application-controller.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/varmour-application-controller.md @@ -25,6 +25,69 @@ This solution provides **2 data connector(s)**. vArmour reduces operational risk and increases cyber resiliency by visualizing and controlling application relationships across the enterprise. This vArmour connector enables streaming of Application Controller Violation Alerts into Microsoft Sentinel, so you can take advantage of search & correlation, alerting, & threat intelligence enrichment for each log. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Configure the vArmour Application Controller to forward Common Event Format (CEF) logs to the Syslog agent** + + Send Syslog messages in CEF format to the proxy machine. 
Make sure to send the logs to port 514 TCP on the machine's IP address.
The [Varonis Purview](https://www.varonis.com/) connector provides the capability to sync resources from Varonis to Microsoft Purview. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. + +**Custom Permissions:** +- **Microsoft Entra**: Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher. +- **Microsoft Azure**: Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Run this to setup ingestion for Varonis Resoources** + +This will create the necessary Log Analytics tables, Data Collection Rule (DCR), and an Entra application to securely send data to the DCR. +#### Automated Configuration and Secure Data Ingestion with Entra Application +Clicking on "Deploy" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). +It will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token. +Deploy Varonis connector resources + +**2. Push your logs into the workspace** + +Use the following parameters to configure the Varonis Purview Connector in your Varonis integrations dashboard. 
+- **Tenant ID (Directory ID)**: `TenantId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Application ID**: `ApplicationId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Entra App Registration Secret**: `ApplicationSecret` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Endpoint Uri**: `DataCollectionEndpoint` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Data Collection Rule Immutable ID**: `DataCollectionRuleId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Resources Stream Name**: `Custom-varonisresources` + | | | |--------------------------|---| | **Tables Ingested** | `varonisresources_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/varonissaas.md b/Tools/Solutions Analyzer/connector-docs/solutions/varonissaas.md index c6c82279d82..62c878b2b44 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/varonissaas.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/varonissaas.md @@ -26,6 +26,50 @@ Varonis SaaS provides the capability to ingest [Varonis Alerts](https://www.varo Varonis prioritizes deep data visibility, classification capabilities, and automated remediation for data access. Varonis builds a single prioritized view of risk for your data, so you can proactively and systematically eliminate risk from insider threats and cyberattacks. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to Varonis DatAlert service to pull alerts into Microsoft Sentinel. This might result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +**For Azure function and related services installation use:** + + [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVaronisSaaS%2FData%2520Connectors%2Fazuredeploy.json) + +STEP 1 - Obtain the Varonis DatAlert Endpoint API credentials. + + To generate the Client ID and API key: + 1. Launch the Varonis Web Interface. + 2. Navigate to Configuration -> API Keys. The API Keys page is displayed. + 3. Click Create API Key. The Add New API Key settings are displayed on the right. + 4. Fill in the name and description. + 5. Click the Generate Key button. + 6. Copy the API key secret and save it in a handy location. You won't be able to copy it again. 
+ +For additional information, please check: [Varonis Documentation](https://help.varonis.com/s/document-item?bundleId=ami1661784208197&topicId=emp1703144742927.html&_LANG=enus) + +STEP 2 - Deploy the connector and the associated Azure Function. +- **Workspace Name**: `WorkspaceName` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +Use this method for automated deployment of the data connector using an ARM Template. + +1. Click the Deploy to Azure button. + + [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVaronisSaaS%2FData%2520Connectors%2Fazuredeploy.json) +2. Select the preferred Subscription, Resource Group, Region, Storage Account Type. +3. Enter Log Analytics Workspace Name, Varonis FQDN, Varonis SaaS API Key. +4. Click Review + Create, Create. + | | | |--------------------------|---| | **Tables Ingested** | `VaronisAlerts_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/vectra-ai-detect.md b/Tools/Solutions Analyzer/connector-docs/solutions/vectra-ai-detect.md index ca70b6a41b5..8fcf806faff 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/vectra-ai-detect.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/vectra-ai-detect.md @@ -26,6 +26,78 @@ This solution provides **2 data connector(s)**. The AI Vectra Detect connector allows users to connect Vectra Detect logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives users more insight into their organization's network and improves their security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade. + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward AI Vectra Detect logs to Syslog agent in CEF format** + + Configure Vectra (X Series) Agent to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent. + +From the Vectra UI, navigate to Settings > Notifications and Edit Syslog configuration.
Follow below instructions to set up the connection: + +- Add a new Destination (which is the host where the Microsoft Sentinel Syslog Agent is running) + +- Set the Port as **514** + +- Set the Protocol as **UDP** + +- Set the format to **CEF** + +- Set Log types (Select all log types available) + +- Click on **Save** + +User can click the **Test** button to force send some test events. + + For more information, refer to Cognito Detect Syslog Guide which can be downloaded from the resource page in Detect UI. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python --version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/vectra-ai-stream.md b/Tools/Solutions Analyzer/connector-docs/solutions/vectra-ai-stream.md index bc90f7452f0..48d29c89214 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/vectra-ai-stream.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/vectra-ai-stream.md @@ -26,6 +26,101 @@ This solution provides **2 data connector(s)**.
The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Vectra AI Stream configuration**: must be configured to export Stream metadata in JSON + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution. + +>**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed! + + In the first part, we are going to create the custom tables required for this solution (using an ARM template). Then we are going to configure the Data Connector. +**Please proceed with these steps:** + +**Step 1. Create custom tables in Log Analytic Workspace (ARM Template)** + + 1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json) +2. Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**) +3. Click **Review + Create** to deploy.
+ + _Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._ + + **Step 2. Install the Syslog via AMA Data connector** + + _Note: This is only required if it has not been installed yet in Microsoft Sentinel._ +1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub. + +2. Search for 'Syslog' (Provider is Microsoft) and select it. + +3. Check 'Install' button on the bottom of the right panel. + + **Step 3. Configure the Syslog via AMA data connector** + + _Note: Two different Data Collection Rules (DCR) are going to be created during this step_ +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector. + +2. Search for 'Syslog via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE. + +4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE. + +5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. In the Collect tab, select LOG_LOCAL0/LOG_NOTICE + + + + Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication. + +In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables. +**Please proceed with these steps:** + +**Step 1. Modify the syslog-ng configuration** + + _Note: A DCR cannot have more than 10 output flows.
As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._ +1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf). +2. Log into the instance where syslog-ng/AMA is running. +3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded. +4. Save and restart syslog-ng (_systemctl restart syslog-ng_). + + **Step 2. Modify the Data Collection rules configuration** + + _Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_ + 1. Locate the 2 DCR that you created in Microsoft Sentinel. + 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template. + 3. Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name. + 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded. + 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3). + 6. Save --> Review + Create --> Create. + 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template). + 8. 
Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name. + 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded. + 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace. + 11. Save --> Review + Create --> Create. + +**2. Configure Vectra AI Stream** + +Configure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA. + +From the Vectra UI, navigate to Settings > Stream and Edit the destination configuration: + + 1. Select Publisher: RAW JSON + 2. Set the server IP or hostname (which is the host where AMA is running) + 3. Set all the ports to **514**. + 4. Save. + +**3. Run the following command to validate (or set up) that syslog-ng is listening on port 514** + | | | |--------------------------|---| | **Tables Ingested** | `vectra_beacon_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/vectra-xdr.md b/Tools/Solutions Analyzer/connector-docs/solutions/vectra-xdr.md index c3d386e034b..6aa5b916157 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/vectra-xdr.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/vectra-xdr.md @@ -22,6 +22,226 @@ This solution provides **1 data connector(s)**. The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**. + +**STEP 1 - Configuration steps for the Vectra API Credentials** + + Follow these instructions to create a Vectra Client ID and Client Secret. + 1. Log into your Vectra portal + 2. Navigate to Manage -> API Clients + 3. From the API Clients page, select 'Add API Client' to create a new client. + 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. + 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints. + +**STEP 2 - App Registration steps for the Application in Microsoft Entra ID** + + This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID: + 1. Sign in to the [Azure portal](https://portal.azure.com/). + 2. Search for and select **Microsoft Entra ID**. + 3. Under **Manage**, select **App registrations > New registration**. + 4. Enter a display **Name** for your application. + 5. Select **Register** to complete the initial app registration. + 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. 
+ +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) + +**STEP 3 - Add a client secret for application in Microsoft Entra ID** + + Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret: + 1. In the Azure portal, in **App registrations**, select your application. + 2. Select **Certificates & secrets > Client secrets > New client secret**. + 3. Add a description for your client secret. + 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months. + 5. Select **Add**. + 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. + +> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret) + +**STEP 4 - Get Object ID of your application in Microsoft Entra ID** + + After creating your app registration, follow the steps in this section to get Object ID: + 1. Go to **Microsoft Entra ID**. + 2. Select **Enterprise applications** from the left menu. + 3. Find your newly created application in the list (you can search by the name you provided). + 4. Click on the application. + 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment. + +**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID** + + Follow the steps in this section to assign the role: + 1. In the Azure portal, Go to **Resource Group** and select your resource group. + 2. 
Go to **Access control (IAM)** from left panel. + 3. Click on **Add**, and then select **Add role assignment**. + 4. Select **Contributor** as role and click on next. + 5. In **Assign access to**, select `User, group, or service principal`. + 6. Click on **add members** and type **your app name** that you have created and select it. + 7. Now click on **Review + assign** and then again click on **Review + assign**. + +> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal) + +**STEP 6 - Create a Keyvault** + + Follow these instructions to create a new Keyvault. + 1. In the Azure portal, Go to **Key vaults** and click on Create. + 2. Select Subscription, Resource Group and provide a unique name for the keyvault. + +**STEP 7 - Create Access Policy in Keyvault** + + Follow these instructions to create access policy in Keyvault. + 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create. + 2. Select all keys & secrets permissions. Click next. + 3. In the principal section, search by application name which was generated in STEP - 2. Click next. + + **Note: **Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'** + +**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available. + +**9. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Vectra connector. + +1. Click the **Deploy to Azure** button below.
+ + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the below information : + Function Name + Workspace Name + Vectra Base URL (https://) + Vectra Client Id - Health + Vectra Client Secret Key - Health + Vectra Client Id - Entity Scoring + Vectra Client Secret - Entity Scoring + Vectra Client Id - Detections + Vectra Client Secret - Detections + Vectra Client Id - Audits + Vectra Client Secret - Audits + Vectra Client Id - Lockdown + Vectra Client Secret - Lockdown + Vectra Client Id - Host-Entity + Vectra Client Secret - Host-Entity + Vectra Client Id - Account-Entity + Vectra Client Secret - Account-Entity + Key Vault Name + Azure Client Id + Azure Client Secret + Tenant Id + Azure Entra ObjectID + StartTime (in MM/DD/YYYY HH:MM:SS Format) + Include Score Decrease + Audits Table Name + Detections Table Name + Entity Scoring Table Name + Lockdown Table Name + Health Table Name + Entities Table Name + Exclude Group Details From Detections + Log Level (Default: INFO) + Lockdown Schedule + Health Schedule + Detections Schedule + Audits Schedule + Entity Scoring Schedule + Entities Schedule +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**10. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. 
Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX). + + e. **Select a runtime:** Choose Python 3.8 or above. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select **+ New application setting**. +3. 
Add each of the following application settings individually, with their respective values (case-sensitive): + Workspace ID + Workspace Key + Vectra Base URL (https://) + Vectra Client Id - Health + Vectra Client Secret Key - Health + Vectra Client Id - Entity Scoring + Vectra Client Secret - Entity Scoring + Vectra Client Id - Detections + Vectra Client Secret - Detections + Vectra Client Id - Audits + Vectra Client Secret - Audits + Vectra Client Id - Lockdown + Vectra Client Secret - Lockdown + Vectra Client Id - Host-Entity + Vectra Client Secret - Host-Entity + Vectra Client Id - Account-Entity + Vectra Client Secret - Account-Entity + Key Vault Name + Azure Client Id + Azure Client Secret + Tenant Id + StartTime (in MM/DD/YYYY HH:MM:SS Format) + Include Score Decrease + Audits Table Name + Detections Table Name + Entity Scoring Table Name + Lockdown Table Name + Health Table Name + Entities Table Name + Log Level (Default: INFO) + Lockdown Schedule + Health Schedule + Detections Schedule + Audits Schedule + Entity Scoring Schedule + Entities Schedule + logAnalyticsUri (optional) + - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. 
+ | | | |--------------------------|---| | **Tables Ingested** | `Audits_Data_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/veeam.md b/Tools/Solutions Analyzer/connector-docs/solutions/veeam.md index fe56221f3c4..c5fc33b81eb 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/veeam.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/veeam.md @@ -42,6 +42,39 @@ The connector supports integration with Veeam Backup & Replication, Veeam ONE an - **VeeamSessions_CL**: Veeam sessions +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Veeam Infrastructure Access**: Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. 
+ +**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions** + +>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following). +- **Workspace Name**: `WorkspaceName` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**2. Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the Veeam data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Microsoft Sentinel Workspace Name**. +4. Click **Review + Create**, **Create**. + | | | |--------------------------|---| | **Tables Ingested** | `VeeamAuthorizationEvents_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/virtualmetric-datastream.md b/Tools/Solutions Analyzer/connector-docs/solutions/virtualmetric-datastream.md index a6218f73b14..72bad3e0923 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/virtualmetric-datastream.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/virtualmetric-datastream.md @@ -30,6 +30,177 @@ This solution provides **3 data connector(s)**. VirtualMetric DataStream connector deploys Data Collection Rules to ingest security telemetry into Microsoft Sentinel data lake. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
+ +**Custom Permissions:** +- **App Registration or Azure Managed Identity**: VirtualMetric DataStream requires an Entra ID identity to authenticate and send logs to Microsoft Sentinel data lake. You can choose between creating an App Registration with Client ID and Client Secret, or using Azure Managed Identity for enhanced security without credential management. +- **Resource Group Role Assignment**: The chosen identity (App Registration or Managed Identity) must be assigned to the resource group containing the Data Collection Endpoint with the following roles: Monitoring Metrics Publisher (for log ingestion) and Monitoring Reader (for reading stream configuration). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure VirtualMetric DataStream for Microsoft Sentinel data lake** + +Configure the VirtualMetric DataStream for Microsoft Sentinel data lake to send data. +**Register Application in Microsoft Entra ID (Optional)** + + **Choose your authentication method:** + +**Option A: Use Azure Managed Identity (Recommended)** +- Skip this step if you plan to use Azure Managed Identity for authentication. +- Azure Managed Identity provides a more secure authentication method without managing credentials. + +**Option B: Register a Service Principal Application** + +1. **Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**: + - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab. + - Ensure you are logged in with an account that has **Application Administrator** or **Global Administrator** permissions. + +2. **Create a New Application**: + - In the **Microsoft Entra ID portal**, select **App registrations** from the left-hand navigation. + - Click on **+ New registration**. 
+ - Fill out the following fields: + - **Name**: Enter a descriptive name for the app (e.g., "VirtualMetric ASIM Connector"). + - **Supported account types**: Choose **Accounts in this organizational directory only** (Single tenant). + - **Redirect URI**: Leave this blank. + - Click **Register** to create the application. + +3. **Copy Application and Tenant IDs**: + - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You'll need these for VirtualMetric DataStream configuration. + +4. **Create a Client Secret**: + - In the **Certificates & secrets** section, click **+ New client secret**. + - Add a description (e.g., 'VirtualMetric ASIM Secret') and set an appropriate expiration period. + - Click **Add**. + - **Copy the client secret value immediately**, as it will not be shown again. Store this securely for VirtualMetric DataStream configuration. + + **Assign Required Permissions** + + Assign the required roles to your chosen authentication method (Service Principal or Managed Identity) in the resource group. + +**For Service Principal (if you completed Step 1):** + +1. **Navigate to Your Resource Group**: + - Open the **Azure Portal** and navigate to the **Resource Group** that contains your **Log Analytics Workspace** and where **Data Collection Rules (DCRs)** will be deployed. + +2. **Assign the Monitoring Metrics Publisher Role**: + - In the **Resource Group**, click on **Access control (IAM)** from the left-hand menu. + - Click **+ Add** and select **Add role assignment**. + - In the **Role** tab, search for and select **Monitoring Metrics Publisher**. + - Click **Next** to go to the **Members** tab. + - Under **Assign access to**, select **User, group, or service principal**. + - Click **+ Select members** and search for your registered application by name or client ID. + - Select your application and click **Select**. + - Click **Review + assign** twice to complete the assignment. + +3. 
**Assign the Monitoring Reader Role**: + - Repeat the same process to assign the **Monitoring Reader** role: + - Click **+ Add** and select **Add role assignment**. + - In the **Role** tab, search for and select **Monitoring Reader**. + - Follow the same member selection process as above. + - Click **Review + assign** twice to complete the assignment. + +**For Azure Managed Identity:** + +1. **Create or Identify Your Managed Identity**: + - If using **System-assigned Managed Identity**: Enable it on your Azure resource (VM, App Service, etc.). + - If using **User-assigned Managed Identity**: Create one in your resource group if it doesn't exist. + +2. **Assign the Monitoring Metrics Publisher Role**: + - Follow the same steps as above, but in the **Members** tab: + - Under **Assign access to**, select **Managed identity**. + - Click **+ Select members** and choose the appropriate managed identity type and select your identity. + - Click **Select**, then **Review + assign** twice to complete. + +3. **Assign the Monitoring Reader Role**: + - Repeat the process to assign the **Monitoring Reader** role to the same managed identity. + +**Required Permission Summary:** +The assigned roles provide the following capabilities: +- **Monitoring Metrics Publisher**: Write data to Data Collection Endpoints (DCE) and send telemetry through Data Collection Rules (DCR) +- **Monitoring Reader**: Read stream configuration and access Log Analytics workspace for ASIM table ingestion + + **Deploy Azure Infrastructure** + + Deploy the required Data Collection Endpoint (DCE) and Data Collection Rules (DCR) for Microsoft Sentinel data lake tables using our ARM template. + +1. 
**Deploy to Azure**: + - Click the Deploy to Azure button below to automatically deploy the required infrastructure: + - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVirtualMetric%2520DataStream%2FData%2520Connectors%2FVirtualMetric-SentinelDataLake%2FDeployToAzure.json) + - This will take you directly to the Azure portal to start the deployment. + +2. **Configure Deployment Parameters**: + - On the custom deployment page, configure the following settings: + + **Project details:** + - **Subscription**: Select your Azure subscription from the dropdown + - **Resource group**: Select an existing resource group or click **Create new** to create a new one + + **Instance details:** + - **Region**: Select the Azure region where your Log Analytics workspace is located (e.g., West Europe) + - **Workspace**: Enter your Log Analytics workspace name + - **DCE Name**: Provide a name for the Data Collection Endpoint (e.g., "vmetric-dce") + - **DCR Name Prefix**: Provide a prefix for the Data Collection Rules (e.g., "vmetric-dcr") + +3. **Complete the Deployment**: + - Click **Review + create** to validate the template. + - Review the parameters and click **Create** to deploy the resources. + - Wait for the deployment to complete (typically takes 2-5 minutes). + +4. 
**Verify Deployed Resources**: + - After deployment, verify the following resources were created: + - **Data Collection Endpoint (DCE)**: Check **Azure Portal > Monitor > Data Collection Endpoints** + - **Data Collection Rules (DCRs)**: Check **Azure Portal > Monitor > Data Collection Rules** + - **Copy the DCE Logs Ingestion URI** from the DCE **Overview** page (format: `https://..ingest.monitor.azure.com`) + - **Copy the DCE Resource ID** from the DCE **Overview** page (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`) + - For each DCR, note the **Immutable ID** from the **Overview** page - you'll need these for VirtualMetric DataStream configuration. + + **Configure VirtualMetric DataStream Integration** + + Set up VirtualMetric DataStream to send security telemetry to Microsoft Sentinel data lake tables. + +1. **Access VirtualMetric DataStream Configuration**: + - Log into your **VirtualMetric DataStream** management console. + - Navigate to **Fleet Management** > **Targets** section. + - Click **Add new target** button. + - Select **Microsoft Sentinel** target. + +2. **Configure General Settings**: + - **Name**: Enter a name for your target (e.g., "cus01-ms-sentinel") + - **Description**: Optionally provide a description for the target configuration + +3. **Configure Azure Authentication** (choose based on Step 1): + + **For Service Principal Authentication:** + - **Managed Identity for Azure**: Keep **Disabled** + - **Tenant ID**: Enter the Directory (tenant) ID from Step 1 + - **Client ID**: Enter the Application (client) ID from Step 1 + - **Client Secret**: Enter the client secret value from Step 1 + + **For Azure Managed Identity:** + - **Managed Identity for Azure**: Set to **Enabled** + +4. 
**Configure Stream Properties**: + - **Endpoint**: Choose your configuration method: + - **For manual stream configuration**: Enter the DCE Logs Ingestion URI (format: `https://..ingest.monitor.azure.com`) + - **For auto stream detection**: Enter the DCE Resource ID (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`) + - **Streams**: Select **Auto** for automatic stream detection, or configure specific streams if needed + +5. **Verify Data Ingestion in Microsoft Sentinel data lake**: + - Return to your **Log Analytics Workspace** + - Run sample queries on the ASIM tables to confirm data is being received: + ```kql + ASimNetworkSessionLogs + | where TimeGenerated > ago(1h) + | take 10 + ``` + - Check the **Microsoft Sentinel Overview** dashboard for new data sources and event counts. + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/vmray.md b/Tools/Solutions Analyzer/connector-docs/solutions/vmray.md index c6c90ccdaa7..886f2196a23 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/vmray.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/vmray.md @@ -21,6 +21,42 @@ This solution provides **1 data connector(s)**. VMRayThreatIntelligence connector automatically generates and feeds threat intelligence for all submissions to VMRay, improving threat detection and incident response in Sentinel. This seamless integration empowers teams to proactively address emerging threats. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). 
+ +**Custom Permissions:** +- **Azure Subscription**: Azure Subscription with owner role is required to register an application in Azure Active Directory and assign the role of contributor to the app in the resource group. +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **VMRay API Key** is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the VMRay API to pull VMRay Threat IOCs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**1. Deploy VMRay Threat Intelligence Connector** + +1.
Ensure you have all the required prerequisites: **Client ID**, **Tenant ID**, **Client Secret**, **VMRay API Key**, and **VMRay Base URL**. +2. To obtain the Client ID, Client Secret, and Tenant ID, [follow these instructions](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/VMRay#vmray-configurations) +3. For the **Flex Consumption Plan**, click the **Deploy to Azure** button below: + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VMRay-azuredeployflex) + +4. For the **Premium Plan**, click the **Deploy to Azure** button below: + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VMRay-azuredeploypremium). + | | | |--------------------------|---| | **Tables Ingested** | `ThreatIntelligenceIndicator` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/vmware-carbon-black-cloud.md b/Tools/Solutions Analyzer/connector-docs/solutions/vmware-carbon-black-cloud.md index 8559c11659d..4b560ec8da1 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/vmware-carbon-black-cloud.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/vmware-carbon-black-cloud.md @@ -25,6 +25,88 @@ This solution provides **2 data connector(s)**. The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission. 
+ +**Custom Permissions:** +- **Environment**: You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies +- **Environment**: You must have a Carbon Black account and the required permissions to create a Data Forwarder to AWS S3 buckets. +For more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +#### 1. AWS CloudFormation Deployment + To configure access on AWS, two templates have been generated to set up the AWS environment to send logs from the S3 bucket to your Log Analytics Workspace. + #### For each template, create Stack in AWS: + 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) + 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template + 3. Click 'Next' and 'Create stack' +- **Template 1: OpenID connect authentication deployment**: `Oidc` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Template 2: AWS Carbon Black resources deployment**: `CarbonBlack` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +When deploying the 'Template 2: AWS Carbon Black resources deployment' template you'll need to supply a few parameters + * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS) + * **Role Name**: Must begin with 'OIDC_' prefix, has a default value.
+ * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here + * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. + * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket). + * **SQSQueuePrefix**: The stack creates multiple queues; this prefix will be added to each one of them. + * **WorkspaceID**: Use the Workspace ID provided below. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector. +#### 2. Carbon Black data forwarder configuration + After all AWS resources have been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarder'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder). Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step.
+ You will be required to add 'S3 prefix' for each forwarder, please use this mapping: + | Event type | S3 prefix | + |-----------------|-----------| + | Alert | carbon-black-cloud-forwarder/Alerts | + | Auth Events | carbon-black-cloud-forwarder/Auth | + | Endpoint Events | carbon-black-cloud-forwarder/Endpoint | + | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist | +#### 2.1. Test your data forwarder (Optional) + To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately. +#### 3. Connect new collectors + To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown + +**Connector Management Interface** + +This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors. + +📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information: +- **Role ARN** +- **Queue URL** +- **Stream name** + +➕ **Add New Collector**: Click the "Add new collector" button to configure a new data collector (see configuration form below). + +🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors. + +> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. You cannot configure data collectors through this static documentation. 
+ +**Add new controller** + +*AWS S3 connector* + +When you click the "Add new collector" button in the portal, a configuration form will open. You'll need to provide: + +*Account details* + +- **Role ARN** (required) +- **Queue URL** (required) +- **Data type** (required): Select from available options + - Alerts + - Auth Events + - Endpoint Events + - Watchlist + +> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal. + | | | |--------------------------|---| | **Tables Ingested** | `ASimAuthenticationEventLogs` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/vmware-sd-wan-and-sase.md b/Tools/Solutions Analyzer/connector-docs/solutions/vmware-sd-wan-and-sase.md index 70ad5f94fd1..6a66ade87e8 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/vmware-sd-wan-and-sase.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/vmware-sd-wan-and-sase.md @@ -21,6 +21,95 @@ This solution provides **1 data connector(s)**. The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **REST API Credentials/permissions**: **api_veco_authorization**, **api_veco_fqdn** is required for REST API. [See the documentation to learn more about VMware SASE APIs](https://developer.vmware.com/apis/vmware-sase-platform/). Check all [requirements and follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) for obtaining credentials. The Function App only supports token-based API authentication. Be advised that the API Token generated will inherit the access rights of the user account under which it was generated. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the VMware Edge Cloud Orchestrator REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +**STEP 1 - Configuration steps for the VECO API** + + [Follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) to create and obtain the credentials. 
+ +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function.** + +**3. Option 1 - Azure Resource Manager (ARM) Template** + +Use this method for automated deployment of the VMware SD-WAN and SASE Connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelvmwaresdwan) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter or modify the Function App, Log Analytics and Azure Monitor settings, enter the VECO FQDN (without https://, for example vco123-usvi1.velocloud.net), enter the API token created (including "Token " at the beginning of the string), and adjust your desired Function App frequency, then deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2 - Manual Deployment of Azure Functions** + +Use the following step-by-step instructions to deploy the VMware SD-WAN and SASE Connector manually with Azure Functions (Deployment via Visual Studio Code). + +**1. Deploy a Function App** + +> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-vmwaresdwan-functionapp) file. Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5.
Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. vmwsase-siemXXXXXXXXXXXXX). + + e. **Select a runtime:** Choose Python 3.10. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration. + +**2. Configure the Function App** + +1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab . +3. Check if the application has these settings defined correctly and adjust if needed: + api_veco_authorization + api_veco_fqdn + app_frequency_mins + azsa_share_connectionstring + azsa_share_name dce_endpoint + dcr_cwsdlplog_immutableid + dcr_cwshealth_immutableid + dcr_cwsweblog_immutableid + dcr_efsfwlog_immutableid + dcr_efshealth_immutableid + dcr_saseaudit_immutableid + stream_cwsdlplog + stream_cwshealth + stream_cwsweblog + stream_efsfwlog + stream_efshealth + stream_saseaudit +3. In case you made changes to application settings have been entered, make sure that you click **Save**. 
+ | | | |--------------------------|---| | **Tables Ingested** | `VMware_CWS_DLPLogs_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/vmware-vcenter.md b/Tools/Solutions Analyzer/connector-docs/solutions/vmware-vcenter.md index ff588fcb4b5..d075052a99d 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/vmware-vcenter.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/vmware-vcenter.md @@ -21,6 +21,87 @@ This solution provides **1 data connector(s)**. The [vCenter](https://www.vmware.com/in/products/vcenter-server.html) connector allows you to easily connect your vCenter server logs with Microsoft Sentinel. This gives you more insight into your organization's data centers and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Include custom pre-requisites if the connectivity requires - else delete customs**: Description for any custom pre-requisite + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias VMware vCenter and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20vCenter/Parsers/vCenter.txt), on the second line of the query, enter the hostname(s) of your VMware vCenter device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. +> 1. If you have not installed the vCenter solution from ContentHub then [Follow the steps](https://aka.ms/sentinel-vCenter-parser) to use the Kusto function alias, **vCenter** + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Follow the configuration steps below to get vCenter server logs into Microsoft Sentinel. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps. + For vCenter Server logs, we have issues while parsing the data by OMS agent data using default settings. +So we advice to capture the logs into custom table **vcenter_CL** using below instructions. +1. Login to the server where you have installed OMS agent. +2. Download config file vCenter.conf + wget -v https://aka.ms/sentinel-vcenteroms-conf -O vcenter.conf +3. 
Copy vcenter.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. + cp vcenter.conf /etc/opt/microsoft/omsagent/<>/conf/omsagent.d/ +4. Edit vcenter.conf as follows: + + a. vcenter.conf uses the port **22033** by default. Ensure this port is not being used by any other source on your server + + b. If you would like to change the default port for **vcenter.conf** make sure that you dont use default Azure monotoring /log analytic agent ports I.e.(For example CEF uses TCP port **25226** or **25224**) + + c. replace **workspace_id** with real value of your Workspace ID (lines 13,14,15,18) +5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command: + sudo /opt/microsoft/omsagent/bin/service_control restart +6. Modify /etc/rsyslog.conf file - add below template preferably at the beginning / before directives section + + $template vcenter,"%timestamp% %hostname% %msg%\ n" + + **Note - There is no space between slash(\\) and character 'n' in above command.** + + 7. Create a custom conf file in /etc/rsyslog.d/ for example 10-vcenter.conf and add following filter conditions. + +Download config file [10-vCenter.conf](https://aka.ms/sentinel-vcenter-conf) + + With an added statement you will need to create a filter which will specify the logs coming from the vcenter server to be forwarded to the custom table. + + reference: [Filter Conditions — rsyslog 8.18.0.master documentation](https://rsyslog.readthedocs.io/en/latest/configuration/filters.html) + + Here is an example of filtering that can be defined, this is not complete and will require additional testing for each installation. + if $rawmsg contains "vcenter-server" then @@127.0.0.1:22033;vcenter + & stop + if $rawmsg contains "vpxd" then @@127.0.0.1:22033;vcenter + & stop + +8. 
Restart rsyslog + systemctl restart rsyslog +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Configure and connect the vCenter device(s)** + +[Follow these instructions](https://docs.vmware.com/en/VMware-vSphere/7.0/com.vmware.vsphere.monitoring.doc/GUID-9633A961-A5C3-4658-B099-B81E0512DC21.html) to configure the vCenter to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. + | | | |--------------------------|---| | **Tables Ingested** | `vcenter_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/vmwareesxi.md b/Tools/Solutions Analyzer/connector-docs/solutions/vmwareesxi.md index d5a6151c3d3..9afbe371ee0 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/vmwareesxi.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/vmwareesxi.md @@ -21,6 +21,52 @@ This solution provides **1 data connector(s)**. The [VMware ESXi](https://www.vmware.com/products/esxi-and-esx.html) connector allows you to easily connect your VMWare ESXi logs with Microsoft Sentinel This gives you more insight into your organization's ESXi servers and improves your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Custom Permissions:** +- **VMwareESXi**: must be configured to export logs via Syslog + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias VMwareESXi and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMWareESXi/Parsers/VMwareESXi.yaml), on the second line of the query, enter the hostname(s) of your VMwareESXi device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. + 2. Select **Apply below configuration to my machines** and select the facilities and severities. + 3. Click **Save**. +- **Open Syslog settings** + +**3. Configure and connect the VMware ESXi** + +1. Follow these instructions to configure the VMWare ESXi to forward syslog: + - [VMware ESXi 3.5 and 4.x](https://kb.vmware.com/s/article/1016621) + - [VMware ESXi 5.0+](https://docs.vmware.com/en/VMware-vSphere/5.5/com.vmware.vsphere.monitoring.doc/GUID-9F67DB52-F469-451F-B6C8-DAE8D95976E7.html) +2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address. 
+ | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/votiro.md b/Tools/Solutions Analyzer/connector-docs/solutions/votiro.md index c1d12750104..29055f95bd7 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/votiro.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/votiro.md @@ -20,6 +20,59 @@ This solution provides **1 data connector(s)**. The Votiro data connector allows you to easily connect your Votiro Event logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. Using Votiro on Microsoft Sentinel will provide you more insights into the sanitization results of files. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds. 
+ + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` +**2. Forward Common Event Format (CEF) logs to Syslog agent** + +Set Votiro Endpoints to send Syslog messages in CEF format to the Forwarder machine. Make sure to send the logs to port 514 TCP on the Forwarder machine's IP address. + +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` + +**4. 
Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/watchguard-firebox.md b/Tools/Solutions Analyzer/connector-docs/solutions/watchguard-firebox.md index 548a19bc5b0..b5677baecc2 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/watchguard-firebox.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/watchguard-firebox.md @@ -21,6 +21,43 @@ This solution provides **1 data connector(s)**. WatchGuard Firebox (https://www.watchguard.com/wgrd-products/firewall-appliances and https://www.watchguard.com/wgrd-products/cloud-and-virtual-firewalls) is security products/firewall-appliances. Watchguard Firebox will send syslog to Watchguard Firebox collector agent.The agent then sends the message to the workspace. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): write permission is required. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias WatchGuardFirebox and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchguard%20Firebox/Parsers/WatchGuardFirebox.txt) on the second line of the query, enter the hostname(s) of your WatchGuard Firebox device(s) and any other unique identifiers for the logstream. 
The function usually takes 10-15 minutes to activate after solution installation/update. + +**1. Install and onboard the agent for Linux** + +Typically, you should install the agent on a different computer from the one on which the logs are generated. + +> Syslog logs are collected only from **Linux** agents. +**Choose where to install the agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**2. Configure the logs to be collected** + +Configure the facilities you want to collect and their severities. + +1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**. +2. Select **Apply below configuration to my machines** and select the facilities and severities. +3. Click **Save**. +- **Open Syslog settings** + | | | |--------------------------|---| | **Tables Ingested** | `Syslog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/windows-firewall.md b/Tools/Solutions Analyzer/connector-docs/solutions/windows-firewall.md index 4ab490c52ef..9131bb3d6b8 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/windows-firewall.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/windows-firewall.md @@ -13,44 +13,8 @@ ## Data Connectors -This solution provides **2 data connector(s)**. +**This solution does not include data connectors.** -### [Windows Firewall](../connectors/windowsfirewall.md) - -**Publisher:** Microsoft - -### [Windows Firewall Events via AMA](../connectors/windowsfirewallama.md) - -**Publisher:** Microsoft - -Windows Firewall is a Microsoft Windows application that filters information coming to your system from the internet and blocking potentially harmful programs. 
The firewall software blocks most programs from communicating through the firewall. To stream your Windows Firewall application logs collected from your machines, use the Azure Monitor agent (AMA) to stream those logs to the Microsoft Sentinel workspace. - - - -A configured data collection endpoint (DCE) is required to be linked with the data collection rule (DCR) created for the AMA to collect logs. For this connector, a DCE is automatically created in the same region as the workspace. If you already use a DCE stored in the same region, it's possible to change the default created DCE and use your existing one through the API. DCEs can be located in your resources with **SentinelDCE** prefix in the resource name. - - - -For more information, see the following articles: - -- [Data collection endpoints in Azure Monitor](https://learn.microsoft.com/azure/azure-monitor/essentials/data-collection-endpoint-overview?tabs=portal) - -- [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2228623&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci) - -| | | -|--------------------------|---| -| **Tables Ingested** | `ASimNetworkSessionLogs` | -| **Connector Definition Files** | [template_WindowsFirewallAma.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Firewall/Data%20Connectors/template_WindowsFirewallAma.JSON) | - -[→ View full connector details](../connectors/windowsfirewallama.md) - -## Tables Reference - -This solution ingests data into **2 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `ASimNetworkSessionLogs` | [Windows Firewall Events via AMA](../connectors/windowsfirewallama.md) | -| `WindowsFirewall` | [Windows Firewall](../connectors/windowsfirewall.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. 
[← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/windows-forwarded-events.md b/Tools/Solutions Analyzer/connector-docs/solutions/windows-forwarded-events.md index faea32993aa..e18354c02f1 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/windows-forwarded-events.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/windows-forwarded-events.md @@ -13,31 +13,8 @@ ## Data Connectors -This solution provides **1 data connector(s)**. +**This solution does not include data connectors.** -### [Windows Forwarded Events](../connectors/windowsforwardedevents.md) - -**Publisher:** Microsoft - -You can stream all Windows Event Forwarding (WEF) logs from the Windows Servers connected to your Microsoft Sentinel workspace using Azure Monitor Agent (AMA). - - This connection enables you to view dashboards, create custom alerts, and improve investigation. - - This gives you more insight into your organization’s network and improves your security operation capabilities. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219963&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -| | | -|--------------------------|---| -| **Tables Ingested** | `WindowsEvent` | -| **Connector Definition Files** | [WindowsForwardedEvents.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Forwarded%20Events/Data%20Connectors/WindowsForwardedEvents.JSON) | - -[→ View full connector details](../connectors/windowsforwardedevents.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `WindowsEvent` | [Windows Forwarded Events](../connectors/windowsforwardedevents.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. 
[← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/windows-security-events.md b/Tools/Solutions Analyzer/connector-docs/solutions/windows-security-events.md index 63f871174cb..2bb8fd95e47 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/windows-security-events.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/windows-security-events.md @@ -13,31 +13,8 @@ ## Data Connectors -This solution provides **2 data connector(s)**. +**This solution does not include data connectors.** -### [Security Events via Legacy Agent](../connectors/securityevents.md) - -**Publisher:** Microsoft - -### [Windows Security Events via AMA](../connectors/windowssecurityevents.md) - -**Publisher:** Microsoft - -You can stream all security events from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220225&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). 
- -| | | -|--------------------------|---| -| **Tables Ingested** | `SecurityEvent` | -| **Connector Definition Files** | [template_WindowsSecurityEvents.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Security%20Events/Data%20Connectors/template_WindowsSecurityEvents.JSON) | - -[→ View full connector details](../connectors/windowssecurityevents.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `SecurityEvent` | [Security Events via Legacy Agent](../connectors/securityevents.md), [Windows Security Events via AMA](../connectors/windowssecurityevents.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/windows-server-dns.md b/Tools/Solutions Analyzer/connector-docs/solutions/windows-server-dns.md index edbbba4b702..38414272f6c 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/windows-server-dns.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/windows-server-dns.md @@ -13,52 +13,8 @@ ## Data Connectors -This solution provides **2 data connector(s)**. +**This solution does not include data connectors.** -### [Windows DNS Events via AMA](../connectors/asimdnsactivitylogs.md) - -**Publisher:** Microsoft - -### [DNS](../connectors/dns.md) - -**Publisher:** Microsoft - -The DNS log connector allows you to easily connect your DNS analytic and audit logs with Microsoft Sentinel, and other related data, to improve investigation. - - - -**When you enable DNS log collection you can:** - -- Identify clients that try to resolve malicious domain names. - -- Identify stale resource records. - -- Identify frequently queried domain names and talkative DNS clients. - -- View request load on DNS servers. - -- View dynamic DNS registration failures. 
- - - -For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220127&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci). - -| | | -|--------------------------|---| -| **Tables Ingested** | `DnsEvents` | -| | `DnsInventory` | -| **Connector Definition Files** | [template_DNS.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Server%20DNS/Data%20Connectors/template_DNS.JSON) | - -[→ View full connector details](../connectors/dns.md) - -## Tables Reference - -This solution ingests data into **3 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `ASimDnsActivityLogs` | [Windows DNS Events via AMA](../connectors/asimdnsactivitylogs.md) | -| `DnsEvents` | [DNS](../connectors/dns.md) | -| `DnsInventory` | [DNS](../connectors/dns.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/wirex-network-forensics-platform.md b/Tools/Solutions Analyzer/connector-docs/solutions/wirex-network-forensics-platform.md index 1be13e43bd1..9e206f8997f 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/wirex-network-forensics-platform.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/wirex-network-forensics-platform.md @@ -25,6 +25,63 @@ This solution provides **2 data connector(s)**. The WireX Systems data connector allows security professional to integrate with Microsoft Sentinel to allow you to further enrich your forensics investigations; to not only encompass the contextual content offered by WireX but to analyze data from other sources, and to create custom dashboards to give the most complete picture during a forensic investigation and to create custom workflows. 
+**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1. Kindly follow the steps to configure the data connector** + +**Step A. Configure the Common Event Format (CEF) via AMA data connector** + + _Note:- CEF logs are collected only from Linux Agents_ + +1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade . + +2. Search for 'Common Event Format (CEF) via AMA' data connector and open it. + +3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule) + + _Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_ + +4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine + + **Step B. Forward Common Event Format (CEF) logs to Syslog agent** + + Contact WireX support (https://wirexsystems.com/contact-us/) in order to configure your NFP solution to send Syslog messages in CEF format to the proxy machine. 
Make sure that the central manager can send the logs to port 514 TCP on the machine's IP address. + + **Step C. Validate connection** + + Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + + 1. Make sure that you have Python on your machine using the following command: python -version + +2. You must have elevated permissions (sudo) on your machine + - **Run the following command to validate your connectivity:**: `sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef` + +**2. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/withsecureelementsviaconnector.md b/Tools/Solutions Analyzer/connector-docs/solutions/withsecureelementsviaconnector.md index fd37f63a250..7e63b11a64e 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/withsecureelementsviaconnector.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/withsecureelementsviaconnector.md @@ -28,6 +28,81 @@ It requires deploying "Elements Connector" either on-prem or in cloud. The Common Event Format (CEF) provides natively search & correlation, alerting and threat intelligence enrichment for each data log. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Linux Syslog agent configuration** + +Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel. + +> Notice that the data from all regions will be stored in the selected workspace +**1.1 Select or create a Linux machine** + + Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your WithSecurity solution and Sentinel. The machine can be on-prem environment, Microsoft Azure or other cloud based. +> Linux needs to have `syslog-ng` and `python`/`python3` installed. + + **1.2 Install the CEF collector on the Linux machine** + + Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP. + +> 1. Make sure that you have Python on your machine using the following command: python -version. + +> 2. You must have elevated permissions (sudo) on your machine. + - **Run the following command to install and apply the CEF collector:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}` + - **For python3 use command below:**: `sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python3 cef_installer.py {0} {1}` +**2. 
Forward data from WithSecure Elements Connector to Syslog agent** + +This describes how to install and configure Elements Connector step by step. +**2.1 Order Connector subscription** + + If Connector subscription has not been ordered yet go to EPP in Elements Portal. Then navigate to Downloads and in Elements Connector section click 'Create subscription key' button. You can check Your subscription key in Subscriptions. + + **2.2 Download Connector** + + Go to Downloads and in WithSecure Elements Connector section select correct installer. + + **2.3 Create management API key** + + When in EPP open account settings in top right corner. Then select Get management API key. If key has been created earlier it can be read there as well. + + **2.4 Install Connector** + + To install Elements Connector follow [Elements Connector Docs](https://www.withsecure.com/userguides/product.html#business/connector/latest/en/). + + **2.5 Configure event forwarding** + + If api access has not been configured during installation follow [Configuring API access for Elements Connector](https://www.withsecure.com/userguides/product.html#business/connector/latest/en/task_F657F4D0F2144CD5913EE510E155E234-latest-en). +Then go to EPP, then Profiles, then use For Connector from where you can see the connector profiles. Create a new profile (or edit an existing not read-only profile). In Event forwarding enable it. SIEM system address: **127.0.0.1:514**. Set format to **Common Event Format**. Protocol is **TCP**. Save profile and assign it to Elements Connector in Devices tab. +**3. Validate connection** + +Follow the instructions to validate your connectivity: + +Open Log Analytics to check if the logs are received using the CommonSecurityLog schema. + +>It may take about 20 minutes until the connection streams data to your workspace. + +If the logs are not received, run the following connectivity validation script: + +> 1. 
Make sure that you have Python on your machine using the following command: python -version + +>2. You must have elevated permissions (sudo) on your machine +- **Run the following command to validate your connectivity:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}` +- **For python3 use command below:**: `sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python3 cef_troubleshoot.py {0}` + +**4. Secure your machine** + +Make sure to configure the machine's security according to your organization's security policy + + +[Learn more >](https://aka.ms/SecureCEF) + | | | |--------------------------|---| | **Tables Ingested** | `CommonSecurityLog` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/withsecureelementsviafunction.md b/Tools/Solutions Analyzer/connector-docs/solutions/withsecureelementsviafunction.md index b54f554f000..e07e4e2eb7b 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/withsecureelementsviafunction.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/withsecureelementsviafunction.md @@ -42,6 +42,49 @@ With this solution Azure Function is deployed to your tenant, polling periodical For more information visit our website at: [https://www.withsecure.com](https://www.withsecure.com). +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **WithSecure Elements API client credentials**: Client credentials are required. 
[See the documentation to learn more.](https://connect.withsecure.com/getting-started/elements#getting-client-credentials) + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Create WithSecure Elements API credentials** + +Follow the [user guide](https://connect.withsecure.com/getting-started/elements#getting-client-credentials) to create Elements API credentials. Save credentials in a safe place. + +**2. Create Microsoft Entra application** + +Create new Microsoft Entra application and credentials. Follow [the instructions](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-microsoft-entra-application) and store values of **Directory (tenant) ID**, **Object ID**, **Application (client) ID** and **Client Secret** (from client credentials field). Remember to store Client Secret in a safe place. + +**3. Deploy Function App** + +>**NOTE:** This connector uses Azure Functions to pull logs from WithSecure Elements. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store Microsoft Entra client credentials and WithSecure Elements API client credentials in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. 
+ +>**IMPORTANT:** Before deploying the WithSecure Elements connector, have the Workspace Name (can be copied from the following), data from Microsoft Entra (Directory (tenant) ID, Object ID, Application (client) ID and Client Secret), as well as the WithSecure Elements client credentials, readily available. +- **Workspace Name**: `workspaceName` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**1. Deploy all the resources related to the connector** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-WithSecureElementsViaFunction-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the **Workspace ID**, **Entra Client ID**, **Entra Client Secret**, **Entra Tenant ID**, **Elements API Client ID**, **Elements API Client Secret**. +>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. +4. You can also fill in optional fields: **Elements API url**, **Engine**, **Engine Group**. Use default value of **Elements API url** unless you have some special case. **Engine** and **Engine Group** map to [security events request parameters](https://connect.withsecure.com/api-reference/elements#post-/security-events/v1/security-events), fill in those parameters if you are interested only in events from specific engine or engine group, in case you want to receive all security events leave the fields with default values. +5. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +6. Click **Purchase** to deploy. 
+ | | | |--------------------------|---| | **Tables Ingested** | `WsSecurityEvents_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/wiz.md b/Tools/Solutions Analyzer/connector-docs/solutions/wiz.md index 862c6d7a02a..c2e24fd01b2 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/wiz.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/wiz.md @@ -21,6 +21,56 @@ This solution provides **1 data connector(s)**. The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Wiz Service Account credentials**: Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. +Creates an Azure Key Vault with all the required parameters stored as secrets. + +**1. STEP 1 - Get your Wiz credentials** + +Follow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the required credentials. + +**2. STEP 2 - Deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Option 1: Deploy using the Azure Resource Manager (ARM) Template** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +3. Enter the following parameters: +> - Choose **KeyVaultName** and **FunctionName** for the new resources + >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** +>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey** +>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**. 
+ +>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**. + +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. + +**4. Option 2: Manual Deployment of the Azure Function** + +>Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually. + | | | |--------------------------|---| | **Tables Ingested** | `WizAuditLogsV2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/workday.md b/Tools/Solutions Analyzer/connector-docs/solutions/workday.md index db149e5d54a..f6003536fa8 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/workday.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/workday.md @@ -21,6 +21,38 @@ This solution provides **1 data connector(s)**. The [Workday](https://www.workday.com/) User Activity data connector provides the capability to ingest User Activity Logs from [Workday API](https://community.workday.com/sites/default/files/file-hosting/restapi/index.html#privacy/v1/get-/activityLogging) into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): Read and Write permissions are required. + +**Custom Permissions:** +- **Workday User Activity API access**: Access to the Workday user activity API through Oauth are required. The API Client needs to have the scope: System and it needs to be authorized by an account with System Auditing permissions. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. 
Connect to Workday to start collecting user activity logs in Microsoft Sentinel** + +1) In Workday, access the "Edit Tenant Setup - Security" task, verify "OAuth 2.0 Settings" section, make sure that the "OAuth 2.0 Clients Enabled" check box is ticked. + 2) In Workday, access the "Edit Tenant Setup - System" task, verify "User Activity Logging" section, make sure that the "Enable User Activity Logging" check box is ticked. + 3) In Workday, access the "Register API Client" task. + 4) Define the Client Name, select the "Client Grant Type": "Authorization Code Grant" and then select "Access Token Type": "Bearer" + 5) Enter the "Redirection URI": https://portal.azure.com/TokenAuthorize/ExtensionName/Microsoft_Azure_Security_Insights + 6) In section "Scope (Functional Areas)", select "System" and click OK at the bottom + 7) Copy the Client ID and Client Secret before navigating away from the page, and store it securely. + 8) In Sentinel, in the connector page - provide required Token, Authorization and User Activity Logs Endpoints, along with Client ID and Client Secret from previous step. Then click "Connect". + 9) A Workday pop up will appear to complete the OAuth2 authentication and authorization of the API client. Here you need to provide credentials for Workday account with "System Auditing" permissions in Workday (can be either Workday account or Integration System User). 
+ 10) Once that's complete, the message will be displayed to authorize your API client +- **Token Endpoint**: https://wd2-impl-services1.workday.com/ccx/oauth2/{tenantName}/token +- **Authorization Endpoint**: https://impl.workday.com/{tenantName}/authorize +- **User Activity Logs Endpoint, it ends with /activityLogging **: https://wd2-impl-services1.workday.com/ccx/api/privacy/v1/{tenantName}/activityLogging +- **OAuth Configuration**: + - Client ID + - Client Secret + - Click 'Connect' to authenticate + | | | |--------------------------|---| | **Tables Ingested** | `ASimAuditEventLogs` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/workplace-from-facebook.md b/Tools/Solutions Analyzer/connector-docs/solutions/workplace-from-facebook.md index 89679865d15..0d9edcc3532 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/workplace-from-facebook.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/workplace-from-facebook.md @@ -21,6 +21,89 @@ This solution provides **1 data connector(s)**. The [Workplace](https://www.workplace.com/) data connector provides the capability to ingest common Workplace events into Microsoft Sentinel through Webhooks. Webhooks enable custom integration apps to subscribe to events in Workplace and receive updates in real time. When a change occurs in Workplace, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.facebook.com/docs/workplace/reference/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **Webhooks Credentials/permissions**: WorkplaceAppSecret, WorkplaceVerifyToken, Callback URL are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://developers.facebook.com/docs/workplace/reference/webhooks), [configuring permissions](https://developers.facebook.com/docs/workplace/reference/permissions). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias WorkplaceFacebook and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workplace%20from%20Facebook/Parsers/Workplace_Facebook.txt) on the second line of the query, enter the hostname(s) of your Workplace Facebook device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. + +**STEP 1 - Configuration steps for the Workplace** + + Follow the instructions to configure Webhooks. + +1. Log in to the Workplace with Admin user credentials. +2. In the Admin panel, click **Integrations**. +3. In the **All integrations** view, click **Create custom integration** +4. Enter the name and description and click **Create**. +5. In the **Integration details** panel show **App secret** and copy. +6. In the **Integration permissions** panel set all read permissions. Refer to [permission page](https://developers.facebook.com/docs/workplace/reference/permissions) for details. +7. Now proceed to STEP 2 to follow the steps (listed in Option 1 or 2) to Deploy the Azure Function. +8. Enter the requested parameters and also enter a Token of choice. Copy this Token / Note it for the upcoming step. +9. After the deployment of Azure Functions completes successfully, open Function App page, select your app, go to **Functions**, click **Get Function URL** and copy this / Note it for the upcoming step. +10. Go back to Workplace from Facebook. In the **Configure webhooks** panel on each Tab set **Callback URL** as the same value that you copied in point 9 above and Verify token as the same + value you copied in point 8 above which was obtained during STEP 2 of Azure Functions deployment. +11. Click Save.
+ +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions** + +>**IMPORTANT:** Before deploying the Workplace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Workplace data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-WorkplaceFacebook-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-WorkplaceFacebook-azuredeploy-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Location**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **WorkplaceVerifyToken** (can be any expression, copy and save it for STEP 1), **WorkplaceAppSecret** and deploy. +4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. +5. Click **Purchase** to deploy. +6. After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Workplace data connector manually with Azure Functions (Deployment via Visual Studio Code).
+**Step 1 - Deploy a Function App** + + **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-WorkplaceFacebook-functionapp) file. Extract archive to your local development computer. +2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode. +3. After successful deployment of the function app, follow next steps for configuring it. + + **Step 2 - Configure the Function App** + + 1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select ** New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + WorkplaceAppSecret + WorkplaceVerifyToken + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) +> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `Workplace_Facebook_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/zerofox.md b/Tools/Solutions Analyzer/connector-docs/solutions/zerofox.md index 18fa3594a6f..5ffca98bbbf 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/zerofox.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/zerofox.md @@ -25,6 +25,49 @@ This solution provides **2 data connector(s)**. 
The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). +- **ZeroFox API Credentials/permissions**: **ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. 
+ +**STEP 1 - Retrieval of ZeroFox credentials:** + + Follow these instructions to set up logging and obtain credentials. +1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password +2. Click into the Settings button and go to the Data Connectors Section. +3. Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username. + +**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template:** + +>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available. +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**3. Preparing resources for deployment.** + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy) +2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. +3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token** +4. +5. Click **Review + Create** to deploy. + | | | |--------------------------|---| | **Tables Ingested** | `ZeroFox_CTI_C2_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/zeronetworks.md b/Tools/Solutions Analyzer/connector-docs/solutions/zeronetworks.md index 8e2976ad520..998d8e26ecd 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/zeronetworks.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/zeronetworks.md @@ -22,6 +22,24 @@ This solution provides **1 data connector(s)**.
The [Zero Networks Segment](https://zeronetworks.com/) Audit data connector provides the capability to ingest Zero Networks Audit events into Microsoft Sentinel through the REST API. This data connector uses Microsoft Sentinel native polling capability. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key) + +**Custom Permissions:** +- **Zero Networks API Token**: **ZeroNetworksAPIToken** is required for REST API. See the API Guide and follow the instructions for obtaining credentials. + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Connect Zero Networks to Microsoft Sentinel** + +Enable Zero Networks audit Logs. +> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `APIKey`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step. + | | | |--------------------------|---| | **Tables Ingested** | `ZNSegmentAuditNativePoller_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/zimperium-mobile-threat-defense.md b/Tools/Solutions Analyzer/connector-docs/solutions/zimperium-mobile-threat-defense.md index 7fee13e5753..365624b53c5 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/zimperium-mobile-threat-defense.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/zimperium-mobile-threat-defense.md @@ -21,6 +21,33 @@ This solution provides **1 data connector(s)**. 
Zimperium Mobile Threat Defense connector gives you the ability to connect the Zimperium threat log with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's mobile threat landscape and enhances your security operation capabilities. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +**1. Configure and connect Zimperium MTD** + +1. In zConsole, click **Manage** on the navigation bar. +2. Click the **Integrations** tab. +3. Click the **Threat Reporting** button and then the **Add Integrations** button. +4. Create the Integration: + - From the available integrations, select Microsoft Sentinel. + - Enter your workspace id and primary key from the fields below, click **Next**. + - Fill in a name for your Microsoft Sentinel integration. + - Select a Filter Level for the threat data you wish to push to Microsoft Sentinel. + - Click **Finish** +5. For additional instructions, please refer to the [Zimperium customer support portal](https://support.zimperium.com). 
+- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `ZimperiumMitigationLog_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/zoomreports.md b/Tools/Solutions Analyzer/connector-docs/solutions/zoomreports.md index 37db1754ead..dfa8ce2367e 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/zoomreports.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/zoomreports.md @@ -21,6 +21,94 @@ This solution provides **1 data connector(s)**. The [Zoom](https://zoom.us/) Reports data connector provides the capability to ingest [Zoom Reports](https://developers.zoom.us/docs/api/rest/reference/zoom-api/methods/#tag/Reports) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developers.zoom.us/docs/api/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions on the workspace are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Custom Permissions:** +- **Microsoft.Web/sites permissions**: Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/). 
+- **REST API Credentials/permissions**: **AccountID**, **ClientID** and **ClientSecret** are required for Zoom API. [See the documentation to learn more about Zoom API](https://developers.zoom.us/docs/internal-apps/create/). [Follow the instructions for Zoom API configurations](https://aka.ms/sentinel-zoomreports-readme). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This connector uses Azure Functions to connect to the Zoom API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details. + +>**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App. + +**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Zoom and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZoomReports/Parsers/Zoom.yaml). The function usually takes 10-15 minutes to activate after solution installation/update. + +**STEP 1 - Configuration steps for the Zoom API** + + [Follow the instructions](https://developers.zoom.us/docs/internal-apps/create/) to obtain the credentials. 
+ +**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function** + +>**IMPORTANT:** Before deploying the Zoom Reports data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following). +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +**Option 1 - Azure Resource Manager (ARM) Template** + + Use this method for automated deployment of the Zoom Reports data connector using an ARM Template. + +1. Click the **Deploy to Azure** button below. + + [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ZoomAPI-azuredeployV2) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ZoomAPI-azuredeployV2-gov) +2. Select the preferred **Subscription**, **Resource Group** and **Region**. +> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. +3. Enter the **AccountID**, **ClientID**, **ClientSecret**, **WorkspaceID**, **WorkspaceKey**, **Function Name** and click Review + create. +4. Finally click **Create** to deploy. + + **Option 2 - Manual Deployment of Azure Functions** + + Use the following step-by-step instructions to deploy the Zoom Reports data connector manually with Azure Functions (Deployment via Visual Studio Code). +**Step 1 - Deploy a Function App** + + **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development. + +1. Download the [Azure Function App](https://aka.ms/sentinel-ZoomAPI-functionapp) file.
Extract archive to your local development computer. +2. Start VS Code. Choose File in the main menu and select Open Folder. +3. Select the top level folder from extracted files. +4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button. +If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure** +If you're already signed in, go to the next step. +5. Provide the following information at the prompts: + + a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app. + + b. **Select Subscription:** Choose the subscription to use. + + c. Select **Create new Function App in Azure** (Don't choose the Advanced option) + + d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ZoomXXXXX). + + e. **Select a runtime:** Choose Python 3.11. + + f. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located. + +6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied. +7. Go to Azure Portal for the Function App configuration + + **Step 2 - Configure the Function App** + + 1. In the Function App, select the Function App Name and select **Configuration**. +2. In the **Application settings** tab, select ** New application setting**. +3. Add each of the following application settings individually, with their respective string values (case-sensitive): + AccountID + ClientID + ClientSecret + WorkspaceID + WorkspaceKey + logAnalyticsUri (optional) + Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. +4. Once all application settings have been entered, click **Save**. + | | | |--------------------------|---| | **Tables Ingested** | `Zoom_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/zscaler-internet-access.md b/Tools/Solutions Analyzer/connector-docs/solutions/zscaler-internet-access.md index 4e5c54ecf15..4889f864ccb 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/zscaler-internet-access.md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/zscaler-internet-access.md @@ -13,31 +13,8 @@ ## Data Connectors -This solution provides **2 data connector(s)**. +**This solution does not include data connectors.** -### [[Deprecated] Zscaler via Legacy Agent](../connectors/zscaler.md) - -**Publisher:** Zscaler - -### [[Deprecated] Zscaler via AMA](../connectors/zscalerama.md) - -**Publisher:** Zscaler - -The Zscaler data connector allows you to easily connect your Zscaler Internet Access (ZIA) logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
Using Zscaler on Microsoft Sentinel will provide you more insights into your organization’s Internet usage, and will enhance its security operation capabilities.​ - -| | | -|--------------------------|---| -| **Tables Ingested** | `CommonSecurityLog` | -| **Connector Definition Files** | [template_ZscalerAma.JSON](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Internet%20Access/Data%20Connectors/template_ZscalerAma.JSON) | - -[→ View full connector details](../connectors/zscalerama.md) - -## Tables Reference - -This solution ingests data into **1 table(s)**: - -| Table | Used By Connectors | -|-------|-------------------| -| `CommonSecurityLog` | [[Deprecated] Zscaler via AMA](../connectors/zscalerama.md), [[Deprecated] Zscaler via Legacy Agent](../connectors/zscaler.md) | +This solution may contain other components such as analytics rules, workbooks, hunting queries, or playbooks. [← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/zscaler-private-access-(zpa).md b/Tools/Solutions Analyzer/connector-docs/solutions/zscaler-private-access-(zpa).md index d3d8ef699aa..cbf7942d0b0 100644 --- a/Tools/Solutions Analyzer/connector-docs/solutions/zscaler-private-access-(zpa).md +++ b/Tools/Solutions Analyzer/connector-docs/solutions/zscaler-private-access-(zpa).md @@ -21,6 +21,72 @@ This solution provides **1 data connector(s)**. The [Zscaler Private Access (ZPA)](https://help.zscaler.com/zpa/what-zscaler-private-access) data connector provides the capability to ingest [Zscaler Private Access events](https://help.zscaler.com/zpa/log-streaming-service) into Microsoft Sentinel. Refer to [Zscaler Private Access documentation](https://help.zscaler.com/zpa) for more information. +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. + +>**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-ZscalerPrivateAccess-parser) to create the Kusto Functions alias, **ZPAEvent** + +>**NOTE:** This data connector has been developed using Zscaler Private Access version: 21.67.1 + +**1. Install and onboard the agent for Linux or Windows** + +Install the agent on the Server where the Zscaler Private Access logs are forwarded. + +> Logs from Zscaler Private Access Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents. +**Choose where to install the Linux agent:** + +**Install agent on Azure Linux Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install agent on Linux Virtual Machine** + + **Install agent on a non-Azure Linux Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install agent on Linux (Non-Azure)** + +**Choose where to install the Windows agent:** + +**Install agent on Azure Windows Virtual Machine** + + Select the machine to install the agent on and then click **Connect**. + - **Install/configure: InstallAgentOnVirtualMachine** + + **Install agent on a non-Azure Windows Machine** + + Download the agent on the relevant machine and follow the instructions. + - **Install/configure: InstallAgentOnNonAzure** + +**2. Configure the logs to be collected** + +Follow the configuration steps below to get Zscaler Private Access logs into Microsoft Sentinel. 
Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps. +Zscaler Private Access logs are delivered via Log Streaming Service (LSS). Refer to [LSS documentation](https://help.zscaler.com/zpa/about-log-streaming-service) for detailed information +1. Configure [Log Receivers](https://help.zscaler.com/zpa/configuring-log-receiver). While configuring a Log Receiver, choose **JSON** as **Log Template**. +2. Download config file [zpa.conf](https://aka.ms/sentinel-ZscalerPrivateAccess-conf) + wget -v https://aka.ms/sentinel-zscalerprivateaccess-conf -O zpa.conf +3. Login to the server where you have installed Azure Log Analytics agent. +4. Copy zpa.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. +5. Edit zpa.conf as follows: + + a. specify port which you have set your Zscaler Log Receivers to forward logs to (line 4) + + b. zpa.conf uses the port **22033** by default. Ensure this port is not being used by any other source on your server + + c. If you would like to change the default port for **zpa.conf** make sure that it should not get conflict with default AMA agent ports I.e.(For example CEF uses TCP port **25226** or **25224**) + + d. replace **workspace_id** with real value of your Workspace ID (lines 14,15,16,19) +5. 
Save changes and restart the Azure Log Analytics agent for Linux service with the following command: + sudo /opt/microsoft/omsagent/bin/service_control restart +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + | | | |--------------------------|---| | **Tables Ingested** | `ZPA_CL` | diff --git a/Tools/Solutions Analyzer/connector-docs/tables-index.md b/Tools/Solutions Analyzer/connector-docs/tables-index.md index 75deeddcd4b..fb42e1ac3f9 100644 --- a/Tools/Solutions Analyzer/connector-docs/tables-index.md +++ b/Tools/Solutions Analyzer/connector-docs/tables-index.md @@ -12,7 +12,7 @@ Browse all tables ingested by Microsoft Sentinel data connectors. ## Overview -This page lists **811 unique tables** ingested by connectors. +This page lists **740 unique tables** ingested by connectors. **Jump to:** [A](#a) | [B](#b) | [C](#c) | [D](#d) | [E](#e) | [F](#f) | [G](#g) | [H](#h) | [I](#i) | [J](#j) | [K](#k) | [L](#l) | [M](#m) | [N](#n) | [O](#o) | [P](#p) | [Q](#q) | [R](#r) | [S](#s) | [T](#t) | [U](#u) | [V](#v) | [W](#w) | [Z](#z) @@ -20,38 +20,29 @@ This page lists **811 unique tables** ingested by connectors. 
| Table | Solutions | Connectors | |-------|-----------|------------| -| `AADManagedIdentitySignInLogs` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | -| `AADNonInteractiveUserSignInLogs` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | -| `AADProvisioningLogs` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | -| `AADRiskyServicePrincipals` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | -| `AADRiskyUsers` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | -| `AADServicePrincipalRiskEvents` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | -| `AADServicePrincipalSignInLogs` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | -| `AADUserRiskEvents` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | -| `ABAPAuditLog` | [Pathlock_TDnR](solutions/pathlock-tdnr.md), [SAP S4 Cloud Public Edition](solutions/sap-s4-cloud-public-edition.md), [SecurityBridge App](solutions/securitybridge-app.md) | [Pathlock Inc.: Threat Detection and Response for SAP](connectors/pathlock-tdnr.md), [SAP S/4HANA Cloud Public Edition](connectors/saps4publicalerts.md), [SecurityBridge Solution for SAP](connectors/securitybridge.md) | +| [`ABAPAuditLog`](tables/abapauditlog.md) | [Pathlock_TDnR](solutions/pathlock-tdnr.md), [SAP S4 Cloud Public Edition](solutions/sap-s4-cloud-public-edition.md), [SecurityBridge App](solutions/securitybridge-app.md) | [Pathlock Inc.: Threat Detection and Response for SAP](connectors/pathlock-tdnr.md), [SAP S/4HANA Cloud Public 
Edition](connectors/saps4publicalerts.md), [SecurityBridge Solution for SAP](connectors/securitybridge.md) | | `ABNORMAL_CASES_CL` | [AbnormalSecurity](solutions/abnormalsecurity.md) | [AbnormalSecurity ](connectors/abnormalsecurity.md) | | `ABNORMAL_THREAT_MESSAGES_CL` | [AbnormalSecurity](solutions/abnormalsecurity.md) | [AbnormalSecurity ](connectors/abnormalsecurity.md) | -| `ADFSSignInLogs` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | | `ADOAuditLogs_CL` | [AzureDevOpsAuditing](solutions/azuredevopsauditing.md) | [Azure DevOps Audit Logs (via Codeless Connector Platform)](connectors/azuredevopsauditlogs.md) | | `AIShield_CL` | [AIShield AI Security Monitoring](solutions/aishield-ai-security-monitoring.md) | [AIShield](connectors/boschaishield.md) | | `AIX_Audit_CL` | [NXLogAixAudit](solutions/nxlogaixaudit.md) | [NXLog AIX Audit](connectors/nxlogaixaudit.md) | | `ARGOS_CL` | [ARGOSCloudSecurity](solutions/argoscloudsecurity.md) | [ARGOS Cloud Security](connectors/argoscloudsecurity.md) | -| `ASimAuditEventLogs` | [Cisco Meraki Events via REST API](solutions/cisco-meraki-events-via-rest-api.md), [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md), [Workday](solutions/workday.md) | [Cisco Meraki (using REST API)](connectors/ciscomerakimultirule.md), [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md), [Workday User Activity](connectors/workdayccpdefinition.md) | -| `ASimAuthenticationEventLogs` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md), [VMware Carbon Black Cloud](solutions/vmware-carbon-black-cloud.md) | [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md), [VMware Carbon Black Cloud via AWS S3](connectors/carbonblackawss3.md) | +| [`ASimAuditEventLogs`](tables/asimauditeventlogs.md) | [Cisco 
Meraki Events via REST API](solutions/cisco-meraki-events-via-rest-api.md), [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md), [Workday](solutions/workday.md) | [Cisco Meraki (using REST API)](connectors/ciscomerakimultirule.md), [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md), [Workday User Activity](connectors/workdayccpdefinition.md) | +| [`ASimAuthenticationEventLogs`](tables/asimauthenticationeventlogs.md) | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md), [VMware Carbon Black Cloud](solutions/vmware-carbon-black-cloud.md) | [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md), [VMware Carbon Black Cloud via AWS S3](connectors/carbonblackawss3.md) | | `ASimAuthenticationEventLogs_CL` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md) | -| `ASimDnsActivityLogs` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md), [Windows Server DNS](solutions/windows-server-dns.md) | [Windows DNS Events via AMA](connectors/asimdnsactivitylogs.md), [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md) | -| `ASimFileEventLogs` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md), [VMware Carbon Black Cloud](solutions/vmware-carbon-black-cloud.md) | [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md), [VMware Carbon Black Cloud via AWS S3](connectors/carbonblackawss3.md) | +| `ASimDnsActivityLogs` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (CrowdStrike 
Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md) | +| [`ASimFileEventLogs`](tables/asimfileeventlogs.md) | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md), [VMware Carbon Black Cloud](solutions/vmware-carbon-black-cloud.md) | [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md), [VMware Carbon Black Cloud via AWS S3](connectors/carbonblackawss3.md) | | `ASimFileEventLogs_CL` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md) | -| `ASimNetworkSessionLogs` | [Cisco Meraki Events via REST API](solutions/cisco-meraki-events-via-rest-api.md), [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md), [VMware Carbon Black Cloud](solutions/vmware-carbon-black-cloud.md) +1 more | [Cisco Meraki (using REST API)](connectors/ciscomerakimultirule.md), [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md), [Windows Firewall Events via AMA](connectors/windowsfirewallama.md), [VMware Carbon Black Cloud via AWS S3](connectors/carbonblackawss3.md) | -| `ASimProcessEventLogs` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md), [VMware Carbon Black Cloud](solutions/vmware-carbon-black-cloud.md) | [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md), [VMware Carbon Black Cloud via AWS S3](connectors/carbonblackawss3.md) | +| [`ASimNetworkSessionLogs`](tables/asimnetworksessionlogs.md) | [Cisco Meraki Events via REST API](solutions/cisco-meraki-events-via-rest-api.md), [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md), [VMware Carbon Black Cloud](solutions/vmware-carbon-black-cloud.md) | [Cisco Meraki (using 
REST API)](connectors/ciscomerakimultirule.md), [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md), [VMware Carbon Black Cloud via AWS S3](connectors/carbonblackawss3.md) | +| [`ASimProcessEventLogs`](tables/asimprocesseventlogs.md) | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md), [VMware Carbon Black Cloud](solutions/vmware-carbon-black-cloud.md) | [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md), [VMware Carbon Black Cloud via AWS S3](connectors/carbonblackawss3.md) | | `ASimProcessEventLogs_CL` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md) | -| `ASimRegistryEventLogs` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md), [VMware Carbon Black Cloud](solutions/vmware-carbon-black-cloud.md) | [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md), [VMware Carbon Black Cloud via AWS S3](connectors/carbonblackawss3.md) | +| [`ASimRegistryEventLogs`](tables/asimregistryeventlogs.md) | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md), [VMware Carbon Black Cloud](solutions/vmware-carbon-black-cloud.md) | [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md), [VMware Carbon Black Cloud via AWS S3](connectors/carbonblackawss3.md) | | `ASimRegistryEventLogs_CL` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md) | | `ASimUserManagementActivityLogs` | [CrowdStrike Falcon Endpoint 
Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md) | | `ASimUserManagementLogs_CL` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md) | | `ASimWebSessionLogs` | [Cisco Meraki Events via REST API](solutions/cisco-meraki-events-via-rest-api.md) | [Cisco Meraki (using REST API)](connectors/ciscomerakimultirule.md) | | `AWSCloudFront_AccessLog_CL` | [AWS CloudFront](solutions/aws-cloudfront.md) | [Amazon Web Services CloudFront (via Codeless Connector Framework) (Preview)](connectors/awscloudfrontccpdefinition.md) | -| `AWSCloudTrail` | [Amazon Web Services](solutions/amazon-web-services.md) | [Amazon Web Services](connectors/aws.md), [Amazon Web Services S3](connectors/awss3.md) | +| [`AWSCloudTrail`](tables/awscloudtrail.md) | [Amazon Web Services](solutions/amazon-web-services.md) | [Amazon Web Services](connectors/aws.md), [Amazon Web Services S3](connectors/awss3.md) | | `AWSCloudWatch` | [Amazon Web Services](solutions/amazon-web-services.md) | [Amazon Web Services S3](connectors/awss3.md) | | `AWSGuardDuty` | [Amazon Web Services](solutions/amazon-web-services.md) | [Amazon Web Services S3](connectors/awss3.md) | | `AWSNetworkFirewallAlert` | [Amazon Web Services NetworkFirewall](solutions/amazon-web-services-networkfirewall.md) | [Amazon Web Services NetworkFirewall (via Codeless Connector Framework)](connectors/awsnetworkfirewallccpdefinition.md) | @@ -60,29 +51,18 @@ This page lists **811 unique tables** ingested by connectors. 
| `AWSRoute53Resolver` | [Amazon Web Services Route 53](solutions/amazon-web-services-route-53.md) | [Amazon Web Services S3 DNS Route53 (via Codeless Connector Framework)](connectors/awsroute53resolverccpdefinition.md) | | `AWSS3ServerAccess` | [AWS_AccessLogs](solutions/aws-accesslogs.md) | [AWS S3 Server Access Logs (via Codeless Connector Framework)](connectors/awss3serveraccesslogsdefinition.md) | | `AWSSecurityHubFindings` | [AWS Security Hub](solutions/aws-security-hub.md) | [AWS Security Hub Findings (via Codeless Connector Framework)](connectors/awssecurityhubfindingsccpdefinition.md) | -| `AWSVPCFlow` | [AWS VPC Flow Logs](solutions/aws-vpc-flow-logs.md), [Amazon Web Services](solutions/amazon-web-services.md) | [Amazon Web Services S3 VPC Flow Logs](connectors/awss3vpcflowlogsparquetdefinition.md), [Amazon Web Services S3](connectors/awss3.md) | +| [`AWSVPCFlow`](tables/awsvpcflow.md) | [AWS VPC Flow Logs](solutions/aws-vpc-flow-logs.md), [Amazon Web Services](solutions/amazon-web-services.md) | [Amazon Web Services S3 VPC Flow Logs](connectors/awss3vpcflowlogsparquetdefinition.md), [Amazon Web Services S3](connectors/awss3.md) | | `AWSWAF` | [Amazon Web Services](solutions/amazon-web-services.md) | [Amazon Web Services S3 WAF](connectors/awss3wafccpdefinition.md) | -| `AZFWApplicationRule` | [Azure Firewall](solutions/azure-firewall.md) | [Azure Firewall](connectors/azurefirewall.md) | -| `AZFWDnsQuery` | [Azure Firewall](solutions/azure-firewall.md) | [Azure Firewall](connectors/azurefirewall.md) | -| `AZFWFatFlow` | [Azure Firewall](solutions/azure-firewall.md) | [Azure Firewall](connectors/azurefirewall.md) | -| `AZFWFlowTrace` | [Azure Firewall](solutions/azure-firewall.md) | [Azure Firewall](connectors/azurefirewall.md) | -| `AZFWIdpsSignature` | [Azure Firewall](solutions/azure-firewall.md) | [Azure Firewall](connectors/azurefirewall.md) | -| `AZFWInternalFqdnResolutionFailure` | [Azure Firewall](solutions/azure-firewall.md) | [Azure 
Firewall](connectors/azurefirewall.md) | -| `AZFWNatRule` | [Azure Firewall](solutions/azure-firewall.md) | [Azure Firewall](connectors/azurefirewall.md) | -| `AZFWNetworkRule` | [Azure Firewall](solutions/azure-firewall.md) | [Azure Firewall](connectors/azurefirewall.md) | -| `AZFWThreatIntel` | [Azure Firewall](solutions/azure-firewall.md) | [Azure Firewall](connectors/azurefirewall.md) | -| `AlertEvidence` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | | `AliCloudActionTrailLogs_CL` | [Alibaba Cloud ActionTrail](solutions/alibaba-cloud-actiontrail.md) | [Alibaba Cloud ActionTrail (via Codeless Connector Framework)](connectors/alicloudactiontrailccpdefinition.md) | | `AlsidForADLog_CL` | [Alsid For AD](solutions/alsid-for-ad.md) | [Alsid for Active Directory](connectors/alsidforad.md) | | `Anvilogic_Alerts_CL` | [Anvilogic](solutions/anvilogic.md) | [Anvilogic](connectors/anvilogicccfdefinition.md) | | `ApacheHTTPServer_CL` | [ApacheHTTPServer](solutions/apachehttpserver.md) | [[Deprecated] Apache HTTP Server](connectors/apachehttpserver.md) | | `ApigeeX_CL` | [Google Apigee](solutions/google-apigee.md) | [[DEPRECATED] Google ApigeeX](connectors/apigeexdataconnector.md) | -| `Armis_Activities_CL` | [Armis](solutions/armis.md) | [Armis Activities](connectors/armisactivities.md), [Armis Alerts Activities](connectors/armisalertsactivities.md) | -| `Armis_Alerts_CL` | [Armis](solutions/armis.md) | [Armis Alerts](connectors/armisalerts.md), [Armis Alerts Activities](connectors/armisalertsactivities.md) | +| [`Armis_Activities_CL`](tables/armis-activities-cl.md) | [Armis](solutions/armis.md) | [Armis Activities](connectors/armisactivities.md), [Armis Alerts Activities](connectors/armisalertsactivities.md) | +| [`Armis_Alerts_CL`](tables/armis-alerts-cl.md) | [Armis](solutions/armis.md) | [Armis Alerts](connectors/armisalerts.md), [Armis Alerts 
Activities](connectors/armisalertsactivities.md) | | `Armis_Devices_CL` | [Armis](solutions/armis.md) | [Armis Devices](connectors/armisdevices.md) | | `Armorblox_CL` | [Armorblox](solutions/armorblox.md) | [Armorblox](connectors/armorblox.md) | | `AtlassianConfluenceNativePoller_CL` | [AtlassianConfluenceAudit](solutions/atlassianconfluenceaudit.md) | [Atlassian Confluence](connectors/atlassianconfluence.md) | -| `AuditLogs` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | | `Audit_CL` | [Mimecast](solutions/mimecast.md) | [Mimecast Audit](connectors/mimecastauditapi.md) | | `Audits_Data_CL` | [Vectra XDR](solutions/vectra-xdr.md) | [Vectra XDR](connectors/vectraxdr.md) | | `Auth0AM_CL` | [Auth0](solutions/auth0.md) | [Auth0 Access Management](connectors/auth0.md) | @@ -93,8 +73,6 @@ This page lists **811 unique tables** ingested by connectors. | `Awareness_User_Data_CL` | [Mimecast](solutions/mimecast.md) | [Mimecast Awareness Training](connectors/mimecastatapi.md) | | `Awareness_Watchlist_Details_CL` | [Mimecast](solutions/mimecast.md) | [Mimecast Awareness Training](connectors/mimecastatapi.md) | | `AzureActivity` | [Azure Activity](solutions/azure-activity.md) | [Azure Activity](connectors/azureactivity.md) | -| `AzureDiagnostics` | [Azure Batch Account](solutions/azure-batch-account.md), [Azure Cognitive Search](solutions/azure-cognitive-search.md), [Azure DDoS Protection](solutions/azure-ddos-protection.md) +11 more | [Azure Batch Account](connectors/azurebatchaccount-ccp.md), [Azure Cognitive Search](connectors/azurecognitivesearch-ccp.md), [Azure Data Lake Storage Gen1](connectors/azuredatalakestoragegen1-ccp.md), [Azure Event Hub](connectors/azureeventhub-ccp.md), [Azure Firewall](connectors/azurefirewall.md) +9 more | -| `AzureMetrics` | [Azure Storage](solutions/azure-storage.md) | [Azure Storage Account](connectors/azurestorageaccount.md) | | `agari_apdpolicy_log_CL` | 
[Agari](solutions/agari.md) | [Agari Phishing Defense and Brand Protection](connectors/agari.md) | | `agari_apdtc_log_CL` | [Agari](solutions/agari.md) | [Agari Phishing Defense and Brand Protection](connectors/agari.md) | | `agari_bpalerts_log_CL` | [Agari](solutions/agari.md) | [Agari Phishing Defense and Brand Protection](connectors/agari.md) | @@ -140,7 +118,7 @@ This page lists **811 unique tables** ingested by connectors. | `BitwardenGroups_CL` | [Bitwarden](solutions/bitwarden.md) | [Bitwarden Event Logs](connectors/bitwardeneventlogs.md) | | `BitwardenMembers_CL` | [Bitwarden](solutions/bitwarden.md) | [Bitwarden Event Logs](connectors/bitwardeneventlogs.md) | | `BoxEventsV2_CL` | [Box](solutions/box.md) | [Box Events (CCP)](connectors/boxeventsccpdefinition.md) | -| `BoxEvents_CL` | [Box](solutions/box.md) | [Box](connectors/boxdataconnector.md), [Box Events (CCP)](connectors/boxeventsccpdefinition.md) | +| [`BoxEvents_CL`](tables/boxevents-cl.md) | [Box](solutions/box.md) | [Box](connectors/boxdataconnector.md), [Box Events (CCP)](connectors/boxeventsccpdefinition.md) | | `barracuda_CL` | [Barracuda WAF](solutions/barracuda-waf.md) | [[Deprecated] Barracuda Web Application Firewall via Legacy Agent](connectors/barracuda.md) | | `beSECURE_Audit_CL` | [Beyond Security beSECURE](solutions/beyond-security-besecure.md) | [Beyond Security beSECURE](connectors/beyondsecuritybesecure.md) | | `beSECURE_ScanEvent_CL` | [Beyond Security beSECURE](solutions/beyond-security-besecure.md) | [Beyond Security beSECURE](connectors/beyondsecuritybesecure.md) | @@ -163,35 +141,33 @@ This page lists **811 unique tables** ingested by connectors. 
| `CiscoSecureEndpointAuditLogsV2_CL` | [Cisco Secure Endpoint](solutions/cisco-secure-endpoint.md) | [Cisco Secure Endpoint (via Codeless Connector Framework)](connectors/ciscosecureendpointlogsccpdefinition.md) | | `CiscoSecureEndpointEventsV2_CL` | [Cisco Secure Endpoint](solutions/cisco-secure-endpoint.md) | [Cisco Secure Endpoint (via Codeless Connector Framework)](connectors/ciscosecureendpointlogsccpdefinition.md) | | `CiscoSecureEndpoint_CL` | [Cisco Secure Endpoint](solutions/cisco-secure-endpoint.md) | [[DEPRECATED] Cisco Secure Endpoint (AMP)](connectors/ciscosecureendpoint.md) | -| `Cisco_Umbrella_audit_CL` | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | -| `Cisco_Umbrella_cloudfirewall_CL` | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | -| `Cisco_Umbrella_dlp_CL` | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | -| `Cisco_Umbrella_dns_CL` | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | -| `Cisco_Umbrella_fileevent_CL` | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | -| `Cisco_Umbrella_firewall_CL` | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud 
Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | -| `Cisco_Umbrella_intrusion_CL` | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | -| `Cisco_Umbrella_ip_CL` | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | -| `Cisco_Umbrella_proxy_CL` | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | -| `Cisco_Umbrella_ravpnlogs_CL` | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | -| `Cisco_Umbrella_ztaflow_CL` | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | -| `Cisco_Umbrella_ztna_CL` | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | +| [`Cisco_Umbrella_audit_CL`](tables/cisco-umbrella-audit-cl.md) | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | +| 
[`Cisco_Umbrella_cloudfirewall_CL`](tables/cisco-umbrella-cloudfirewall-cl.md) | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | +| [`Cisco_Umbrella_dlp_CL`](tables/cisco-umbrella-dlp-cl.md) | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | +| [`Cisco_Umbrella_dns_CL`](tables/cisco-umbrella-dns-cl.md) | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | +| [`Cisco_Umbrella_fileevent_CL`](tables/cisco-umbrella-fileevent-cl.md) | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | +| [`Cisco_Umbrella_firewall_CL`](tables/cisco-umbrella-firewall-cl.md) | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | +| [`Cisco_Umbrella_intrusion_CL`](tables/cisco-umbrella-intrusion-cl.md) | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | +| [`Cisco_Umbrella_ip_CL`](tables/cisco-umbrella-ip-cl.md) | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium 
plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | +| [`Cisco_Umbrella_proxy_CL`](tables/cisco-umbrella-proxy-cl.md) | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | +| [`Cisco_Umbrella_ravpnlogs_CL`](tables/cisco-umbrella-ravpnlogs-cl.md) | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | +| [`Cisco_Umbrella_ztaflow_CL`](tables/cisco-umbrella-ztaflow-cl.md) | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | +| [`Cisco_Umbrella_ztna_CL`](tables/cisco-umbrella-ztna-cl.md) | [CiscoUmbrella](solutions/ciscoumbrella.md) | [Cisco Cloud Security](connectors/ciscoumbrelladataconnector.md), [Cisco Cloud Security (using elastic premium plan)](connectors/ciscoumbrelladataconnectorelasticpremium.md) | | `CitrixAnalytics_indicatorEventDetails_CL` | [Citrix Analytics for Security](solutions/citrix-analytics-for-security.md) | [CITRIX SECURITY ANALYTICS](connectors/citrix.md) | | `CitrixAnalytics_indicatorSummary_CL` | [Citrix Analytics for Security](solutions/citrix-analytics-for-security.md) | [CITRIX SECURITY ANALYTICS](connectors/citrix.md) | | `CitrixAnalytics_riskScoreChange_CL` | [Citrix Analytics for Security](solutions/citrix-analytics-for-security.md) | [CITRIX SECURITY ANALYTICS](connectors/citrix.md) | | `CitrixAnalytics_userProfile_CL` | [Citrix Analytics for Security](solutions/citrix-analytics-for-security.md) | [CITRIX SECURITY ANALYTICS](connectors/citrix.md) | -| `CloudAppEvents` | [Microsoft Defender 
XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | | `CloudGuard_SecurityEvents_CL` | [Check Point CloudGuard CNAPP](solutions/check-point-cloudguard-cnapp.md) | [Check Point CloudGuard CNAPP Connector for Microsoft Sentinel](connectors/cloudguardccpdefinition.md) | | `Cloud_Integrated_CL` | [Mimecast](solutions/mimecast.md) | [Mimecast Cloud Integrated](connectors/mimecastciapi.md) | -| `CloudflareV2_CL` | [Cloudflare](solutions/cloudflare.md), [Cloudflare CCF](solutions/cloudflare-ccf.md) | [Cloudflare (Using Blob Container) (via Codeless Connector Framework)](connectors/cloudflaredefinition.md) | +| [`CloudflareV2_CL`](tables/cloudflarev2-cl.md) | [Cloudflare](solutions/cloudflare.md), [Cloudflare CCF](solutions/cloudflare-ccf.md) | [Cloudflare (Using Blob Container) (via Codeless Connector Framework)](connectors/cloudflaredefinition.md) | | `Cloudflare_CL` | [Cloudflare](solutions/cloudflare.md) | [[DEPRECATED] Cloudflare](connectors/cloudflaredataconnector.md) | | `Cofense_Triage_failed_indicators_CL` | [CofenseTriage](solutions/cofensetriage.md) | [Cofense Triage Threat Indicators Ingestion](connectors/cofensetriage.md) | | `CognniIncidents_CL` | [Cognni](solutions/cognni.md) | [Cognni](connectors/cognnisentineldataconnector.md) | | `Cohesity_CL` | [CohesitySecurity](solutions/cohesitysecurity.md) | [Cohesity](connectors/cohesitydataconnector.md) | -| `CommonSecurityLog` | [AI Analyst Darktrace](solutions/ai-analyst-darktrace.md), [Akamai Security Events](solutions/akamai-security-events.md), [AristaAwakeSecurity](solutions/aristaawakesecurity.md) +55 more | [[Deprecated] Vectra AI Detect via Legacy Agent](connectors/aivectradetect.md), [[Deprecated] Vectra AI Detect via AMA](connectors/aivectradetectama.md), [[Deprecated] Akamai Security Events via Legacy Agent](connectors/akamaisecurityevents.md), [[Deprecated] Akamai Security Events via AMA](connectors/akamaisecurityeventsama.md), [[Deprecated] 
Awake Security via Legacy Agent](connectors/aristaawakesecurity.md) +94 more | +| [`CommonSecurityLog`](tables/commonsecuritylog.md) | [AI Analyst Darktrace](solutions/ai-analyst-darktrace.md), [Akamai Security Events](solutions/akamai-security-events.md), [AristaAwakeSecurity](solutions/aristaawakesecurity.md) [+51 more](tables/commonsecuritylog.md) | [[Deprecated] Vectra AI Detect via Legacy Agent](connectors/aivectradetect.md), [[Deprecated] Vectra AI Detect via AMA](connectors/aivectradetectama.md), [[Deprecated] Akamai Security Events via Legacy Agent](connectors/akamaisecurityevents.md), [[Deprecated] Akamai Security Events via AMA](connectors/akamaisecurityeventsama.md), [[Deprecated] Awake Security via Legacy Agent](connectors/aristaawakesecurity.md) [+86 more](tables/commonsecuritylog.md) | | `CommvaultSecurityIQ_CL` | [Commvault Security IQ](solutions/commvault-security-iq.md) | [CommvaultSecurityIQ](connectors/commvaultsecurityiq-cl.md) | | `ConfluenceAuditLogs_CL` | [AtlassianConfluenceAudit](solutions/atlassianconfluenceaudit.md) | [ Atlassian Confluence Audit (via Codeless Connector Framework)](connectors/confluenceauditccpdefinition.md) | | `Confluence_Audit_CL` | [AtlassianConfluenceAudit](solutions/atlassianconfluenceaudit.md) | [[Deprecated] Atlassian Confluence Audit](connectors/confluenceauditapi.md) | -| `ContainerInventory` | [Azure kubernetes Service](solutions/azure-kubernetes-service.md) | [Azure Kubernetes Service (AKS)](connectors/azurekubernetes.md) | | `ContrastADRIncident_CL` | [ContrastADR](solutions/contrastadr.md) | [ContrastADR](connectors/contrastadr.md) | | `ContrastADR_CL` | [ContrastADR](solutions/contrastadr.md) | [ContrastADR](connectors/contrastadr.md) | | `Corelight_CL` | [Corelight](solutions/corelight.md) | [Corelight Connector Exporter](connectors/corelightconnectorexporter.md) | @@ -312,7 +288,7 @@ This page lists **811 unique tables** ingested by connectors. 
| `CrowdStrikeHosts` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike API Data Connector (via Codeless Connector Framework)](connectors/crowdstrikeapiccpdefinition.md) | | `CrowdStrikeIncidents` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike API Data Connector (via Codeless Connector Framework)](connectors/crowdstrikeapiccpdefinition.md) | | `CrowdStrikeVulnerabilities` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike API Data Connector (via Codeless Connector Framework)](connectors/crowdstrikeapiccpdefinition.md) | -| `CrowdStrike_Additional_Events_CL` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)](connectors/crowdstrikefalcons3ccpdefinition.md), [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md) | +| [`CrowdStrike_Additional_Events_CL`](tables/crowdstrike-additional-events-cl.md) | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)](connectors/crowdstrikefalcons3ccpdefinition.md), [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md) | | `CrowdStrike_Audit_Events_CL` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)](connectors/crowdstrikefalcons3ccpdefinition.md) | | `CrowdStrike_Auth_Events_CL` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector 
Framework)](connectors/crowdstrikefalcons3ccpdefinition.md) | | `CrowdStrike_DNS_Events_CL` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)](connectors/crowdstrikefalcons3ccpdefinition.md) | @@ -320,17 +296,17 @@ This page lists **811 unique tables** ingested by connectors. | `CrowdStrike_Network_Events_CL` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)](connectors/crowdstrikefalcons3ccpdefinition.md) | | `CrowdStrike_Process_Events_CL` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)](connectors/crowdstrikefalcons3ccpdefinition.md) | | `CrowdStrike_Registry_Events_CL` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)](connectors/crowdstrikefalcons3ccpdefinition.md) | -| `CrowdStrike_Secondary_Data_CL` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)](connectors/crowdstrikefalcons3ccpdefinition.md), [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md) | +| [`CrowdStrike_Secondary_Data_CL`](tables/crowdstrike-secondary-data-cl.md) | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)](connectors/crowdstrikefalcons3ccpdefinition.md), [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](connectors/crowdstrikereplicatorv2.md) | | 
`CrowdStrike_User_Events_CL` | [CrowdStrike Falcon Endpoint Protection](solutions/crowdstrike-falcon-endpoint-protection.md) | [CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)](connectors/crowdstrikefalcons3ccpdefinition.md) | | `CyberArkAudit` | [CyberArkAudit](solutions/cyberarkaudit.md) | [CyberArkAudit](connectors/cyberarkaudit.md) | | `CyberArk_AuditEvents_CL` | [CyberArkAudit](solutions/cyberarkaudit.md) | [CyberArkAudit](connectors/cyberarkaudit.md) | | `CyberSixgill_Alerts_CL` | [Cybersixgill-Actionable-Alerts](solutions/cybersixgill-actionable-alerts.md) | [Cybersixgill Actionable Alerts](connectors/cybersixgillactionablealerts.md) | | `CyberpionActionItems_CL` | [IONIX](solutions/ionix.md) | [IONIX Security Logs](connectors/cyberpionsecuritylogs.md) | -| `CyeraAssets_CL` | [CyeraDSPM](solutions/cyeradspm.md) | [Cyera DSPM Azure Sentinel Data Connector](connectors/cyeradspmccf.md), [Cyera DSPM Azure Functions Sentinel Data Connector](connectors/cyerafunctionsconnector.md) | -| `CyeraAssets_MS_CL` | [CyeraDSPM](solutions/cyeradspm.md) | [Cyera DSPM Azure Sentinel Data Connector](connectors/cyeradspmccf.md), [Cyera DSPM Azure Functions Sentinel Data Connector](connectors/cyerafunctionsconnector.md) | -| `CyeraClassifications_CL` | [CyeraDSPM](solutions/cyeradspm.md) | [Cyera DSPM Azure Sentinel Data Connector](connectors/cyeradspmccf.md), [Cyera DSPM Azure Functions Sentinel Data Connector](connectors/cyerafunctionsconnector.md) | -| `CyeraIdentities_CL` | [CyeraDSPM](solutions/cyeradspm.md) | [Cyera DSPM Azure Sentinel Data Connector](connectors/cyeradspmccf.md), [Cyera DSPM Azure Functions Sentinel Data Connector](connectors/cyerafunctionsconnector.md) | -| `CyeraIssues_CL` | [CyeraDSPM](solutions/cyeradspm.md) | [Cyera DSPM Azure Sentinel Data Connector](connectors/cyeradspmccf.md), [Cyera DSPM Azure Functions Sentinel Data Connector](connectors/cyerafunctionsconnector.md) | +| [`CyeraAssets_CL`](tables/cyeraassets-cl.md) | 
[CyeraDSPM](solutions/cyeradspm.md) | [Cyera DSPM Microsoft Sentinel Data Connector](connectors/cyeradspmccf.md), [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](connectors/cyerafunctionsconnector.md) | +| [`CyeraAssets_MS_CL`](tables/cyeraassets-ms-cl.md) | [CyeraDSPM](solutions/cyeradspm.md) | [Cyera DSPM Microsoft Sentinel Data Connector](connectors/cyeradspmccf.md), [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](connectors/cyerafunctionsconnector.md) | +| [`CyeraClassifications_CL`](tables/cyeraclassifications-cl.md) | [CyeraDSPM](solutions/cyeradspm.md) | [Cyera DSPM Microsoft Sentinel Data Connector](connectors/cyeradspmccf.md), [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](connectors/cyerafunctionsconnector.md) | +| [`CyeraIdentities_CL`](tables/cyeraidentities-cl.md) | [CyeraDSPM](solutions/cyeradspm.md) | [Cyera DSPM Microsoft Sentinel Data Connector](connectors/cyeradspmccf.md), [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](connectors/cyerafunctionsconnector.md) | +| [`CyeraIssues_CL`](tables/cyeraissues-cl.md) | [CyeraDSPM](solutions/cyeradspm.md) | [Cyera DSPM Microsoft Sentinel Data Connector](connectors/cyeradspmccf.md), [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](connectors/cyerafunctionsconnector.md) | | `CyfirmaASCertificatesAlerts_CL` | [Cyfirma Attack Surface](solutions/cyfirma-attack-surface.md) | [CYFIRMA Attack Surface](connectors/cyfirmaattacksurfacealertsconnector.md) | | `CyfirmaASCloudWeaknessAlerts_CL` | [Cyfirma Attack Surface](solutions/cyfirma-attack-surface.md) | [CYFIRMA Attack Surface](connectors/cyfirmaattacksurfacealertsconnector.md) | | `CyfirmaASConfigurationAlerts_CL` | [Cyfirma Attack Surface](solutions/cyfirma-attack-surface.md) | [CYFIRMA Attack Surface](connectors/cyfirmaattacksurfacealertsconnector.md) | @@ -378,19 +354,7 @@ This page lists **811 unique tables** ingested by connectors. 
| `DataminrPulse_Alerts_CL` | [Dataminr Pulse](solutions/dataminr-pulse.md) | [Dataminr Pulse Alerts Data Connector](connectors/dataminrpulsealerts.md) | | `DefendAuditData` | [Egress Iris](solutions/egress-iris.md) | [Egress Iris Connector](connectors/egresssiempolling.md) | | `Detections_Data_CL` | [Vectra XDR](solutions/vectra-xdr.md) | [Vectra XDR](connectors/vectraxdr.md) | -| `DeviceEvents` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | -| `DeviceFileCertificateInfo` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | -| `DeviceFileEvents` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | -| `DeviceImageLoadEvents` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | -| `DeviceInfo` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | -| `DeviceLogonEvents` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | -| `DeviceNetworkEvents` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | -| `DeviceNetworkInfo` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | -| `DeviceProcessEvents` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | -| `DeviceRegistryEvents` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | | `DigitalShadows_CL` | [Digital 
Shadows](solutions/digital-shadows.md) | [Digital Shadows Searchlight](connectors/digitalshadowssearchlightazurefunctions.md) | -| `DnsEvents` | [Windows Server DNS](solutions/windows-server-dns.md) | [DNS](connectors/dns.md) | -| `DnsInventory` | [Windows Server DNS](solutions/windows-server-dns.md) | [DNS](connectors/dns.md) | | `DoppelTable_CL` | [Doppel](solutions/doppel.md) | [Doppel Data Connector](connectors/doppel-dataconnector.md) | | `DragosAlerts_CL` | [Dragos](solutions/dragos.md) | [ Dragos Notifications via Cloud Sitestore](connectors/dragossitestoreccp.md) | | `DruvaInsyncEvents_CL` | [DruvaDataSecurityCloud](solutions/druvadatasecuritycloud.md) | [Druva Events Connector](connectors/druvaeventccpdefinition.md) | @@ -402,7 +366,6 @@ This page lists **811 unique tables** ingested by connectors. | `DynatraceProblems_CL` | [Dynatrace](solutions/dynatrace.md) | [Dynatrace Problems](connectors/dynatraceproblems.md) | | `DynatraceSecurityProblems_CL` | [Dynatrace](solutions/dynatrace.md) | [Dynatrace Runtime Vulnerabilities](connectors/dynatraceruntimevulnerabilities.md) | | `darktrace_model_alerts_CL` | [Darktrace](solutions/darktrace.md) | [Darktrace Connector for Microsoft Sentinel REST API](connectors/darktracerestconnector.md) | -| `discoveryLogs` | [Microsoft Defender for Cloud Apps](solutions/microsoft-defender-for-cloud-apps.md) | [Microsoft Defender for Cloud Apps](connectors/microsoftcloudappsecurity.md) | | `dossier_atp_CL` | [Infoblox](solutions/infoblox.md) | [Infoblox Data Connector via REST API](connectors/infobloxdataconnector.md) | | `dossier_atp_threat_CL` | [Infoblox](solutions/infoblox.md) | [Infoblox Data Connector via REST API](connectors/infobloxdataconnector.md) | | `dossier_dns_CL` | [Infoblox](solutions/infoblox.md) | [Infoblox Data Connector via REST API](connectors/infobloxdataconnector.md) | @@ -430,15 +393,11 @@ This page lists **811 unique tables** ingested by connectors. 
| `EgressDefend_CL` | [Egress Defend](solutions/egress-defend.md) | [Egress Defend](connectors/egressdefendpolling.md) | | `EgressEvents_CL` | [Egress Iris](solutions/egress-iris.md) | [Egress Iris Connector](connectors/egresssiempolling.md) | | `ElasticAgentLogs_CL` | [ElasticAgent](solutions/elasticagent.md) | [Elastic Agent](connectors/elasticagent.md) | -| `EmailAttachmentInfo` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | -| `EmailEvents` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | -| `EmailPostDeliveryEvents` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | -| `EmailUrlInfo` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | | `Entities_Data_CL` | [Vectra XDR](solutions/vectra-xdr.md) | [Vectra XDR](connectors/vectraxdr.md) | | `Entity_Scoring_Data_CL` | [Vectra XDR](solutions/vectra-xdr.md) | [Vectra XDR](connectors/vectraxdr.md) | | `ErmesBrowserSecurityEvents_CL` | [Ermes Browser Security](solutions/ermes-browser-security.md) | [Ermes Browser Security Events](connectors/ermesbrowsersecurityevents.md) | -| `Event` | [ALC-WebCTRL](solutions/alc-webctrl.md), [Microsoft Exchange Security - Exchange On-Premises](solutions/microsoft-exchange-security---exchange-on-premises.md), [MimecastTIRegional](solutions/mimecasttiregional.md) | [Automated Logic WebCTRL ](connectors/automatedlogicwebctrl.md), [[Deprecated] Microsoft Exchange Logs and Events](connectors/esi-exchangeadminauditlogevents.md), [Microsoft Exchange Admin Audit Logs by Event Logs](connectors/esi-opt1exchangeadminauditlogsbyeventlogs.md), [Microsoft Exchange Logs and Events](connectors/esi-opt2exchangeserverseventlogs.md), [Mimecast Intelligence for Microsoft - 
Microsoft Sentinel](connectors/mimecasttiregionalconnectorazurefunctions.md) | -| `ExchangeHttpProxy_CL` | [Microsoft Exchange Security - Exchange On-Premises](solutions/microsoft-exchange-security---exchange-on-premises.md) | [[Deprecated] Microsoft Exchange Logs and Events](connectors/esi-exchangeadminauditlogevents.md), [Microsoft Exchange HTTP Proxy Logs](connectors/esi-opt7exchangehttpproxylogs.md) | +| [`Event`](tables/event.md) | [ALC-WebCTRL](solutions/alc-webctrl.md), [Microsoft Exchange Security - Exchange On-Premises](solutions/microsoft-exchange-security---exchange-on-premises.md), [MimecastTIRegional](solutions/mimecasttiregional.md) | [Automated Logic WebCTRL ](connectors/automatedlogicwebctrl.md), [[Deprecated] Microsoft Exchange Logs and Events](connectors/esi-exchangeadminauditlogevents.md), [Microsoft Exchange Admin Audit Logs by Event Logs](connectors/esi-opt1exchangeadminauditlogsbyeventlogs.md), [Microsoft Exchange Logs and Events](connectors/esi-opt2exchangeserverseventlogs.md), [Mimecast Intelligence for Microsoft - Microsoft Sentinel](connectors/mimecasttiregionalconnectorazurefunctions.md) | +| [`ExchangeHttpProxy_CL`](tables/exchangehttpproxy-cl.md) | [Microsoft Exchange Security - Exchange On-Premises](solutions/microsoft-exchange-security---exchange-on-premises.md) | [[Deprecated] Microsoft Exchange Logs and Events](connectors/esi-exchangeadminauditlogevents.md), [Microsoft Exchange HTTP Proxy Logs](connectors/esi-opt7exchangehttpproxylogs.md) | | `ExtraHop_Detections_CL` | [ExtraHop](solutions/extrahop.md) | [ExtraHop Detections Data Connector](connectors/extrahop.md) | | `eset_CL` | [Eset Security Management Center](solutions/eset-security-management-center.md) | [Eset Security Management Center](connectors/esetsmc.md) | | `eventsapplicationdata_CL` | [Netskopev2](solutions/netskopev2.md) | [Netskope Data Connector](connectors/netskopedataconnector.md) | @@ -447,7 +406,6 @@ This page lists **811 unique tables** ingested by connectors. 
| `eventsincidentdata_CL` | [Netskopev2](solutions/netskopev2.md) | [Netskope Data Connector](connectors/netskopedataconnector.md) | | `eventsnetworkdata_CL` | [Netskopev2](solutions/netskopev2.md) | [Netskope Data Connector](connectors/netskopedataconnector.md) | | `eventspagedata_CL` | [Netskopev2](solutions/netskopev2.md) | [Netskope Data Connector](connectors/netskopedataconnector.md) | -| `exchange` | [Microsoft 365](solutions/microsoft-365.md) | [Microsoft 365 (formerly, Office 365)](connectors/office365.md) | ## F @@ -542,9 +500,6 @@ This page lists **811 unique tables** ingested by connectors. | Table | Solutions | Connectors | |-------|-----------|------------| -| `IdentityDirectoryEvents` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | -| `IdentityLogonEvents` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | -| `IdentityQueryEvents` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | | `IllumioFlowEventsV2_CL` | [IllumioSaaS](solutions/illumiosaas.md) | [Illumio Saas](connectors/illumiosaasccfdefinition.md) | | `IllumioInsightsSummary_CL` | [Illumio Insight](solutions/illumio-insight.md) | [Illumio Insights Summary](connectors/illumioinsightssummaryccp.md) | | `IllumioInsights_CL` | [Illumio Insight](solutions/illumio-insight.md) | [Illumio Insights](connectors/illumioinsightsdefinition.md) | @@ -553,7 +508,7 @@ This page lists **811 unique tables** ingested by connectors. 
| `ImpervaWAFCloudV2_CL` | [ImpervaCloudWAF](solutions/impervacloudwaf.md) | [Imperva Cloud WAF](connectors/impervacloudwaflogsccfdefinition.md) | | `ImpervaWAFCloud_CL` | [ImpervaCloudWAF](solutions/impervacloudwaf.md) | [Imperva Cloud WAF](connectors/impervawafcloudapi.md) | | `InfoSecAnalytics_CL` | [AgileSec Analytics Connector](solutions/agilesec-analytics-connector.md) | [InfoSecGlobal Data Connector](connectors/infosecdataconnector.md) | -| `InfobloxInsight_CL` | [Infoblox](solutions/infoblox.md), [Infoblox SOC Insights](solutions/infoblox-soc-insights.md) | [Infoblox SOC Insight Data Connector via REST API](connectors/infobloxsocinsightsdataconnector-api.md) | +| [`InfobloxInsight_CL`](tables/infobloxinsight-cl.md) | [Infoblox](solutions/infoblox.md), [Infoblox SOC Insights](solutions/infoblox-soc-insights.md) | [Infoblox SOC Insight Data Connector via REST API](connectors/infobloxsocinsightsdataconnector-api.md) | | `Infoblox_Failed_Indicators_CL` | [Infoblox](solutions/infoblox.md) | [Infoblox Data Connector via REST API](connectors/infobloxdataconnector.md) | | `IntegrationTableIncidents_CL` | [ESET Protect Platform](solutions/eset-protect-platform.md) | [ESET Protect Platform](connectors/esetprotectplatform.md) | | `IntegrationTable_CL` | [ESET Protect Platform](solutions/eset-protect-platform.md) | [ESET Protect Platform](connectors/esetprotectplatform.md) | @@ -595,7 +550,6 @@ This page lists **811 unique tables** ingested by connectors. | Table | Solutions | Connectors | |-------|-----------|------------| | `KeeperSecurityEventNewLogs_CL` | [Keeper Security](solutions/keeper-security.md) | [Keeper Security Push Connector](connectors/keepersecuritypush2.md) | -| `KubeEvents` | [Azure kubernetes Service](solutions/azure-kubernetes-service.md) | [Azure Kubernetes Service (AKS)](connectors/azurekubernetes.md) | ## L @@ -617,9 +571,7 @@ This page lists **811 unique tables** ingested by connectors. 
| `MailGuard365_Threats_CL` | [MailGuard 365](solutions/mailguard-365.md) | [MailGuard 365](connectors/mailguard365.md) | | `MailRiskEventEmails_CL` | [MailRisk](solutions/mailrisk.md) | [MailRisk by Secure Practice](connectors/securepracticemailriskconnector.md) | | `Malware_Data_CL` | [CofenseIntelligence](solutions/cofenseintelligence.md) | [Cofense Intelligence Threat Indicators Ingestion](connectors/cofenseintelligence.md) | -| `ManagedIdentitySignInLogs` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | -| `McasShadowItReporting` | [Microsoft Defender for Cloud Apps](solutions/microsoft-defender-for-cloud-apps.md) | [Microsoft Defender for Cloud Apps](connectors/microsoftcloudappsecurity.md) | -| `MessageTrackingLog_CL` | [Microsoft Exchange Security - Exchange On-Premises](solutions/microsoft-exchange-security---exchange-on-premises.md) | [[Deprecated] Microsoft Exchange Logs and Events](connectors/esi-exchangeadminauditlogevents.md), [Microsoft Exchange Message Tracking Logs](connectors/esi-opt6exchangemessagetrackinglogs.md) | +| [`MessageTrackingLog_CL`](tables/messagetrackinglog-cl.md) | [Microsoft Exchange Security - Exchange On-Premises](solutions/microsoft-exchange-security---exchange-on-premises.md) | [[Deprecated] Microsoft Exchange Logs and Events](connectors/esi-exchangeadminauditlogevents.md), [Microsoft Exchange Message Tracking Logs](connectors/esi-opt6exchangemessagetrackinglogs.md) | | `MicrosoftPurviewInformationProtection` | [Microsoft Purview Information Protection](solutions/microsoft-purview-information-protection.md) | [Microsoft Purview Information Protection](connectors/microsoftpurviewinformationprotection.md) | | `MimecastAudit_CL` | [MimecastAudit](solutions/mimecastaudit.md) | [Mimecast Audit & Authentication](connectors/mimecastauditapi.md) | | `MimecastDLP_CL` | [MimecastSEG](solutions/mimecastseg.md) | [Mimecast Secure Email 
Gateway](connectors/mimecastsiemapi.md) | @@ -631,7 +583,7 @@ This page lists **811 unique tables** ingested by connectors. | `MorphisecAlerts_CL` | [Morphisec](solutions/morphisec.md) | [Morphisec API Data Connector (via Codeless Connector Framework)](connectors/morphisecccf.md) | | `MuleSoft_Cloudhub_CL` | [Mulesoft](solutions/mulesoft.md) | [MuleSoft Cloudhub](connectors/mulesoft.md) | | `maillog_CL` | [Proofpoint On demand(POD) Email Security](solutions/proofpoint-on-demand(pod)-email-security.md) | [[Deprecated] Proofpoint On Demand Email Security](connectors/proofpointpod.md) | -| `meraki_CL` | [CiscoMeraki](solutions/ciscomeraki.md) | [[Deprecated] Cisco Meraki](connectors/ciscomeraki.md), [Cisco Meraki (using REST API)](connectors/ciscomerakinativepoller.md) | +| [`meraki_CL`](tables/meraki-cl.md) | [CiscoMeraki](solutions/ciscomeraki.md) | [[Deprecated] Cisco Meraki](connectors/ciscomeraki.md), [Cisco Meraki (using REST API)](connectors/ciscomerakinativepoller.md) | ## N @@ -656,11 +608,8 @@ This page lists **811 unique tables** ingested by connectors. 
| `NetskopeWebtxErrors_CL` | [Netskopev2](solutions/netskopev2.md) | [Netskope Web Transactions Data Connector](connectors/netskopewebtransactionsdataconnector.md) | | `Netskope_CL` | [Netskope](solutions/netskope.md) | [Netskope](connectors/netskope.md) | | `Netskope_WebTx_metrics_CL` | [Netskopev2](solutions/netskopev2.md) | [Netskope Data Connector](connectors/netskopedataconnector.md) | -| `NetworkAccessTraffic` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | -| `NetworkAccessTrafficLogs` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | | `NexposeInsightVMCloud_assets_CL` | [Rapid7InsightVM](solutions/rapid7insightvm.md) | [Rapid7 Insight Platform Vulnerability Management Reports](connectors/insightvmcloudapi.md) | | `NexposeInsightVMCloud_vulnerabilities_CL` | [Rapid7InsightVM](solutions/rapid7insightvm.md) | [Rapid7 Insight Platform Vulnerability Management Reports](connectors/insightvmcloudapi.md) | -| `NonInteractiveUserSignInLogs` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | | `NonameAPISecurityAlert_CL` | [NonameSecurity](solutions/nonamesecurity.md) | [Noname Security for Microsoft Sentinel](connectors/nonamesecuritymicrosoftsentinel.md) | | `NordPassEventLogs_CL` | [NordPass](solutions/nordpass.md) | [NordPass](connectors/nordpass.md) | | `net_assets_CL` | [HolmSecurity](solutions/holmsecurity.md) | [Holm Security Asset Data](connectors/holmsecurityassets.md) | @@ -673,15 +622,14 @@ This page lists **811 unique tables** ingested by connectors. 
| `OCI_Logs_CL` | [Oracle Cloud Infrastructure](solutions/oracle-cloud-infrastructure.md) | [[DEPRECATED] Oracle Cloud Infrastructure](connectors/oraclecloudinfrastructurelogsconnector.md) | | `ObsidianActivity_CL` | [Obsidian Datasharing](solutions/obsidian-datasharing.md) | [Obsidian Datasharing Connector](connectors/obsidiandatasharing.md) | | `ObsidianThreat_CL` | [Obsidian Datasharing](solutions/obsidian-datasharing.md) | [Obsidian Datasharing Connector](connectors/obsidiandatasharing.md) | -| `OfficeActivity` | [Microsoft 365](solutions/microsoft-365.md) | [Microsoft 365 (formerly, Office 365)](connectors/office365.md) | | `OktaNativePoller_CL` | [Okta Single Sign-On](solutions/okta-single-sign-on.md) | [Okta Single Sign-On (Polling CCP)](connectors/oktasso-polling.md) | | `OktaV2_CL` | [Okta Single Sign-On](solutions/okta-single-sign-on.md) | [Okta Single Sign-On](connectors/oktassov2.md) | -| `Okta_CL` | [Okta Single Sign-On](solutions/okta-single-sign-on.md) | [Okta Single Sign-On](connectors/oktasso.md), [Okta Single Sign-On](connectors/oktassov2.md) | -| `Onapsis_Defend_CL` | [Onapsis Defend](solutions/onapsis-defend.md) | [Onapsis Defend Integration](connectors/onapsis.md), [Onapsis Defend: Integrate Unmatched SAP Threat Detection & Intel with Microsoft Sentinel](connectors/onapsis.md) | -| `OneLoginEventsV2_CL` | [OneLoginIAM](solutions/oneloginiam.md) | [[DEPRECATED] OneLogin IAM Platform](connectors/onelogin.md), [OneLogin IAM Platform (via Codeless Connector Framework)](connectors/oneloginiamlogsccpdefinition.md) | -| `OneLoginUsersV2_CL` | [OneLoginIAM](solutions/oneloginiam.md) | [[DEPRECATED] OneLogin IAM Platform](connectors/onelogin.md), [OneLogin IAM Platform (via Codeless Connector Framework)](connectors/oneloginiamlogsccpdefinition.md) | +| [`Okta_CL`](tables/okta-cl.md) | [Okta Single Sign-On](solutions/okta-single-sign-on.md) | [Okta Single Sign-On](connectors/oktasso.md), [Okta Single Sign-On](connectors/oktassov2.md) | +| 
[`Onapsis_Defend_CL`](tables/onapsis-defend-cl.md) | [Onapsis Defend](solutions/onapsis-defend.md) | [Onapsis Defend Integration](connectors/onapsis.md), [Onapsis Defend: Integrate Unmatched SAP Threat Detection & Intel with Microsoft Sentinel](connectors/onapsis.md) | +| [`OneLoginEventsV2_CL`](tables/onelogineventsv2-cl.md) | [OneLoginIAM](solutions/oneloginiam.md) | [[DEPRECATED] OneLogin IAM Platform](connectors/onelogin.md), [OneLogin IAM Platform (via Codeless Connector Framework)](connectors/oneloginiamlogsccpdefinition.md) | +| [`OneLoginUsersV2_CL`](tables/oneloginusersv2-cl.md) | [OneLoginIAM](solutions/oneloginiam.md) | [[DEPRECATED] OneLogin IAM Platform](connectors/onelogin.md), [OneLogin IAM Platform (via Codeless Connector Framework)](connectors/oneloginiamlogsccpdefinition.md) | | `OneLogin_CL` | [OneLoginIAM](solutions/oneloginiam.md) | [[DEPRECATED] OneLogin IAM Platform](connectors/onelogin.md) | -| `OnePasswordEventLogs_CL` | [1Password](solutions/1password.md) | [1Password](connectors/1password.md), [1Password (Serverless)](connectors/1passwordccpdefinition.md) | +| [`OnePasswordEventLogs_CL`](tables/onepasswordeventlogs-cl.md) | [1Password](solutions/1password.md) | [1Password](connectors/1password.md), [1Password (Serverless)](connectors/1passwordccpdefinition.md) | | `OneTrustMetadataV3_CL` | [OneTrust](solutions/onetrust.md) | [OneTrust](connectors/onetrustpush.md) | | `OracleWebLogicServer_CL` | [OracleWebLogicServer](solutions/oracleweblogicserver.md) | [[Deprecated] Oracle WebLogic Server](connectors/oracleweblogicserver.md) | | `OrcaAlerts_CL` | [Orca Security Alerts](solutions/orca-security-alerts.md) | [Orca Security Alerts](connectors/orcasecurityalerts.md) | @@ -690,11 +638,11 @@ This page lists **811 unique tables** ingested by connectors. 
| Table | Solutions | Connectors | |-------|-----------|------------| -| `PaloAltoCortexXDR_Alerts_CL` | [Cortex XDR](solutions/cortex-xdr.md), [Palo Alto Cortex XDR CCP](solutions/palo-alto-cortex-xdr-ccp.md) | [Palo Alto Cortex XDR](connectors/cortexxdrdataconnector.md) | -| `PaloAltoCortexXDR_Audit_Agent_CL` | [Cortex XDR](solutions/cortex-xdr.md), [Palo Alto Cortex XDR CCP](solutions/palo-alto-cortex-xdr-ccp.md) | [Palo Alto Cortex XDR](connectors/cortexxdrdataconnector.md) | -| `PaloAltoCortexXDR_Audit_Management_CL` | [Cortex XDR](solutions/cortex-xdr.md), [Palo Alto Cortex XDR CCP](solutions/palo-alto-cortex-xdr-ccp.md) | [Palo Alto Cortex XDR](connectors/cortexxdrdataconnector.md) | -| `PaloAltoCortexXDR_Endpoints_CL` | [Cortex XDR](solutions/cortex-xdr.md), [Palo Alto Cortex XDR CCP](solutions/palo-alto-cortex-xdr-ccp.md) | [Palo Alto Cortex XDR](connectors/cortexxdrdataconnector.md) | -| `PaloAltoCortexXDR_Incidents_CL` | [Cortex XDR](solutions/cortex-xdr.md), [Palo Alto Cortex XDR CCP](solutions/palo-alto-cortex-xdr-ccp.md) | [Palo Alto Cortex XDR](connectors/cortexxdrdataconnector.md) | +| [`PaloAltoCortexXDR_Alerts_CL`](tables/paloaltocortexxdr-alerts-cl.md) | [Cortex XDR](solutions/cortex-xdr.md), [Palo Alto Cortex XDR CCP](solutions/palo-alto-cortex-xdr-ccp.md) | [Palo Alto Cortex XDR](connectors/cortexxdrdataconnector.md) | +| [`PaloAltoCortexXDR_Audit_Agent_CL`](tables/paloaltocortexxdr-audit-agent-cl.md) | [Cortex XDR](solutions/cortex-xdr.md), [Palo Alto Cortex XDR CCP](solutions/palo-alto-cortex-xdr-ccp.md) | [Palo Alto Cortex XDR](connectors/cortexxdrdataconnector.md) | +| [`PaloAltoCortexXDR_Audit_Management_CL`](tables/paloaltocortexxdr-audit-management-cl.md) | [Cortex XDR](solutions/cortex-xdr.md), [Palo Alto Cortex XDR CCP](solutions/palo-alto-cortex-xdr-ccp.md) | [Palo Alto Cortex XDR](connectors/cortexxdrdataconnector.md) | +| [`PaloAltoCortexXDR_Endpoints_CL`](tables/paloaltocortexxdr-endpoints-cl.md) | [Cortex 
XDR](solutions/cortex-xdr.md), [Palo Alto Cortex XDR CCP](solutions/palo-alto-cortex-xdr-ccp.md) | [Palo Alto Cortex XDR](connectors/cortexxdrdataconnector.md) | +| [`PaloAltoCortexXDR_Incidents_CL`](tables/paloaltocortexxdr-incidents-cl.md) | [Cortex XDR](solutions/cortex-xdr.md), [Palo Alto Cortex XDR CCP](solutions/palo-alto-cortex-xdr-ccp.md) | [Palo Alto Cortex XDR](connectors/cortexxdrdataconnector.md) | | `PaloAltoPrismaCloudAlertV2_CL` | [PaloAltoPrismaCloud](solutions/paloaltoprismacloud.md) | [Palo Alto Prisma Cloud CSPM (via Codeless Connector Framework)](connectors/paloaltoprismacloudcspmccpdefinition.md) | | `PaloAltoPrismaCloudAlert_CL` | [PaloAltoPrismaCloud](solutions/paloaltoprismacloud.md) | [[DEPRECATED] Palo Alto Prisma Cloud CSPM](connectors/paloaltoprismacloud.md) | | `PaloAltoPrismaCloudAuditV2_CL` | [PaloAltoPrismaCloud](solutions/paloaltoprismacloud.md) | [Palo Alto Prisma Cloud CSPM (via Codeless Connector Framework)](connectors/paloaltoprismacloudcspmccpdefinition.md) | @@ -705,8 +653,7 @@ This page lists **811 unique tables** ingested by connectors. 
| `PingOne_AuditActivitiesV2_CL` | [PingOne](solutions/pingone.md) | [Ping One (via Codeless Connector Framework)](connectors/pingoneauditlogsccpdefinition.md) | | `PostgreSQL_CL` | [PostgreSQL](solutions/postgresql.md) | [[Deprecated] PostgreSQL Events](connectors/postgresql.md) | | `PowerBIActivity` | [Microsoft PowerBI](solutions/microsoft-powerbi.md) | [Microsoft PowerBI](connectors/officepowerbi.md) | -| `PrismaCloudCompute_CL` | [Palo Alto Prisma Cloud CWPP](solutions/palo-alto-prisma-cloud-cwpp.md) | [Palo Alto Prisma Cloud CWPP (using REST API)](connectors/paloaltoprismacloudcwpp.md), [Palo Alto Prisma Cloud CWPP (using REST API)](connectors/prismacloudcomputenativepoller.md) | -| `ProjectActivity` | [Microsoft Project](solutions/microsoft-project.md) | [Microsoft Project](connectors/office365project.md) | +| [`PrismaCloudCompute_CL`](tables/prismacloudcompute-cl.md) | [Palo Alto Prisma Cloud CWPP](solutions/palo-alto-prisma-cloud-cwpp.md) | [Palo Alto Prisma Cloud CWPP (using REST API)](connectors/paloaltoprismacloudcwpp.md), [Palo Alto Prisma Cloud CWPP (using REST API)](connectors/prismacloudcomputenativepoller.md) | | `ProofPointTAPClicksBlockedV2_CL` | [ProofPointTap](solutions/proofpointtap.md) | [Proofpoint TAP (via Codeless Connector Platform)](connectors/proofpointtapv2.md) | | `ProofPointTAPClicksBlocked_CL` | [ProofPointTap](solutions/proofpointtap.md) | [[Deprecated] Proofpoint TAP](connectors/proofpointtap.md) | | `ProofPointTAPClicksPermittedV2_CL` | [ProofPointTap](solutions/proofpointtap.md) | [Proofpoint TAP (via Codeless Connector Platform)](connectors/proofpointtapv2.md) | @@ -716,10 +663,9 @@ This page lists **811 unique tables** ingested by connectors. 
| `ProofPointTAPMessagesDeliveredV2_CL` | [ProofPointTap](solutions/proofpointtap.md) | [Proofpoint TAP (via Codeless Connector Platform)](connectors/proofpointtapv2.md) | | `ProofPointTAPMessagesDelivered_CL` | [ProofPointTap](solutions/proofpointtap.md) | [[Deprecated] Proofpoint TAP](connectors/proofpointtap.md) | | `ProofpointPODMailLog_CL` | [Proofpoint On demand(POD) Email Security](solutions/proofpoint-on-demand(pod)-email-security.md) | [Proofpoint On Demand Email Security (via Codeless Connector Platform)](connectors/proofpointccpdefinition.md) | -| `ProofpointPODMessage_CL` | [Proofpoint On demand(POD) Email Security](solutions/proofpoint-on-demand(pod)-email-security.md) | [Proofpoint On Demand Email Security (via Codeless Connector Platform)](connectors/proofpointccpdefinition.md), [[Deprecated] Proofpoint On Demand Email Security](connectors/proofpointpod.md) | +| [`ProofpointPODMessage_CL`](tables/proofpointpodmessage-cl.md) | [Proofpoint On demand(POD) Email Security](solutions/proofpoint-on-demand(pod)-email-security.md) | [Proofpoint On Demand Email Security (via Codeless Connector Platform)](connectors/proofpointccpdefinition.md), [[Deprecated] Proofpoint On Demand Email Security](connectors/proofpointpod.md) | | `ProofpointPOD_maillog_CL` | [Proofpoint On demand(POD) Email Security](solutions/proofpoint-on-demand(pod)-email-security.md) | [[Deprecated] Proofpoint On Demand Email Security](connectors/proofpointpod.md) | | `ProofpointPOD_message_CL` | [Proofpoint On demand(POD) Email Security](solutions/proofpoint-on-demand(pod)-email-security.md) | [[Deprecated] Proofpoint On Demand Email Security](connectors/proofpointpod.md) | -| `ProvisioningLogs` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | | `PurviewDataSensitivityLogs` | [Microsoft Purview](solutions/microsoft-purview.md) | [Microsoft Purview](connectors/microsoftazurepurview.md) | | `prancer_CL` | [Prancer PenSuiteAI 
Integration](solutions/prancer-pensuiteai-integration.md) | [Prancer Data Connector](connectors/prancerlogdata.md) | @@ -740,8 +686,6 @@ This page lists **811 unique tables** ingested by connectors. | `RSAIDPlus_AdminLogs_CL` | [RSAIDPlus_AdminLogs_Connector](solutions/rsaidplus-adminlogs-connector.md) | [RSA ID Plus Admin Logs Connector](connectors/rsaidplus-adminglogs-connector.md) | | `RedCanaryDetections_CL` | [Red Canary](solutions/red-canary.md) | [Red Canary Threat Detection](connectors/redcanarydataconnector.md) | | `Report_links_data_CL` | [CofenseTriage](solutions/cofensetriage.md) | [Cofense Triage Threat Indicators Ingestion](connectors/cofensetriage.md) | -| `RiskyServicePrincipals` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | -| `RiskyUsers` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | | `Rubrik_Anomaly_Data_CL` | [RubrikSecurityCloud](solutions/rubriksecuritycloud.md) | [Rubrik Security Cloud data connector](connectors/rubriksecuritycloudazurefunctions.md) | | `Rubrik_Events_Data_CL` | [RubrikSecurityCloud](solutions/rubriksecuritycloud.md) | [Rubrik Security Cloud data connector](connectors/rubriksecuritycloudazurefunctions.md) | | `Rubrik_Ransomware_Data_CL` | [RubrikSecurityCloud](solutions/rubriksecuritycloud.md) | [Rubrik Security Cloud data connector](connectors/rubriksecuritycloudazurefunctions.md) | @@ -757,9 +701,10 @@ This page lists **811 unique tables** ingested by connectors. 
| `SAPLogServ_CL` | [SAP LogServ](solutions/sap-logserv.md) | [SAP LogServ (RISE), S/4HANA Cloud private edition](connectors/saplogserv.md) | | `SIGNL4_CL` | [SIGNL4](solutions/signl4.md) | [Derdack SIGNL4](connectors/derdacksignl4.md) | | `SINECSecurityGuard_CL` | [SINEC Security Guard](solutions/sinec-security-guard.md) | [SINEC Security Guard](connectors/ssg.md) | +| `SOCPrimeAuditLogs_CL` | [SOC Prime CCF](solutions/soc-prime-ccf.md) | [SOC Prime Platform Audit Logs Data Connector](connectors/socprimeauditlogsdataconnector.md) | | `SailPointIDN_Events_CL` | [SailPointIdentityNow](solutions/sailpointidentitynow.md) | [SailPoint IdentityNow](connectors/sailpointidentitynow.md) | | `SailPointIDN_Triggers_CL` | [SailPointIdentityNow](solutions/sailpointidentitynow.md) | [SailPoint IdentityNow](connectors/sailpointidentitynow.md) | -| `SalesforceServiceCloudV2_CL` | [Salesforce Service Cloud](solutions/salesforce-service-cloud.md) | [[DEPRECATED] Salesforce Service Cloud](connectors/salesforceservicecloud.md), [Salesforce Service Cloud (via Codeless Connector Framework)](connectors/salesforceservicecloudccpdefinition.md) | +| [`SalesforceServiceCloudV2_CL`](tables/salesforceservicecloudv2-cl.md) | [Salesforce Service Cloud](solutions/salesforce-service-cloud.md) | [[DEPRECATED] Salesforce Service Cloud](connectors/salesforceservicecloud.md), [Salesforce Service Cloud (via Codeless Connector Framework)](connectors/salesforceservicecloudccpdefinition.md) | | `SalesforceServiceCloud_CL` | [Salesforce Service Cloud](solutions/salesforce-service-cloud.md) | [[DEPRECATED] Salesforce Service Cloud](connectors/salesforceservicecloud.md) | | `Samsung_Knox_Application_CL` | [Samsung Knox Asset Intelligence](solutions/samsung-knox-asset-intelligence.md) | [Samsung Knox Asset Intelligence](connectors/samsungdcdefinition.md) | | `Samsung_Knox_Audit_CL` | [Samsung Knox Asset Intelligence](solutions/samsung-knox-asset-intelligence.md) | [Samsung Knox Asset 
Intelligence](connectors/samsungdcdefinition.md) | @@ -767,10 +712,10 @@ This page lists **811 unique tables** ingested by connectors. | `Samsung_Knox_Process_CL` | [Samsung Knox Asset Intelligence](solutions/samsung-knox-asset-intelligence.md) | [Samsung Knox Asset Intelligence](connectors/samsungdcdefinition.md) | | `Samsung_Knox_System_CL` | [Samsung Knox Asset Intelligence](solutions/samsung-knox-asset-intelligence.md) | [Samsung Knox Asset Intelligence](connectors/samsungdcdefinition.md) | | `Samsung_Knox_User_CL` | [Samsung Knox Asset Intelligence](solutions/samsung-knox-asset-intelligence.md) | [Samsung Knox Asset Intelligence](connectors/samsungdcdefinition.md) | -| `SecurityAlert` | [Microsoft Defender For Identity](solutions/microsoft-defender-for-identity.md), [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md), [Microsoft Defender for Cloud](solutions/microsoft-defender-for-cloud.md) +5 more | [Microsoft Entra ID Protection](connectors/azureactivedirectoryidentityprotection.md), [Microsoft Defender for Identity](connectors/azureadvancedthreatprotection.md), [Subscription-based Microsoft Defender for Cloud (Legacy)](connectors/azuresecuritycenter.md), [Microsoft Defender for Cloud Apps](connectors/microsoftcloudappsecurity.md), [Microsoft Defender for Endpoint](connectors/microsoftdefenderadvancedthreatprotection.md) +4 more | +| [`SecurityAlert`](tables/securityalert.md) | [Microsoft Defender for Cloud](solutions/microsoft-defender-for-cloud.md), [Microsoft Defender for Office 365](solutions/microsoft-defender-for-office-365.md) | [Tenant-based Microsoft Defender for Cloud](connectors/microsoftdefenderforcloudtenantbased.md), [Microsoft Defender for Office 365 (Preview)](connectors/officeatp.md) | | `SecurityBridgeLogs_CL` | [SecurityBridge App](solutions/securitybridge-app.md) | [SecurityBridge Threat Detection for SAP](connectors/securitybridgesap.md) | -| `SecurityEvent` | [Cyborg Security HUNTER](solutions/cyborg-security-hunter.md), 
[Microsoft Exchange Security - Exchange On-Premises](solutions/microsoft-exchange-security---exchange-on-premises.md), [Semperis Directory Services Protector](solutions/semperis-directory-services-protector.md) +1 more | [Cyborg Security HUNTER Hunt Packages](connectors/cyborgsecurity-hunter.md), [[Deprecated] Microsoft Exchange Logs and Events](connectors/esi-exchangeadminauditlogevents.md), [ Microsoft Active-Directory Domain Controllers Security Event Logs](connectors/esi-opt34domaincontrollerssecurityeventlogs.md), [Security Events via Legacy Agent](connectors/securityevents.md), [Semperis Directory Services Protector](connectors/semperisdsp.md) +1 more | -| `SecurityIncident` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md), [SIGNL4](solutions/signl4.md) | [Derdack SIGNL4](connectors/derdacksignl4.md), [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | +| [`SecurityEvent`](tables/securityevent.md) | [Cyborg Security HUNTER](solutions/cyborg-security-hunter.md), [Microsoft Exchange Security - Exchange On-Premises](solutions/microsoft-exchange-security---exchange-on-premises.md), [Semperis Directory Services Protector](solutions/semperis-directory-services-protector.md) | [Cyborg Security HUNTER Hunt Packages](connectors/cyborgsecurity-hunter.md), [[Deprecated] Microsoft Exchange Logs and Events](connectors/esi-exchangeadminauditlogevents.md), [ Microsoft Active-Directory Domain Controllers Security Event Logs](connectors/esi-opt34domaincontrollerssecurityeventlogs.md), [Semperis Directory Services Protector](connectors/semperisdsp.md) | +| `SecurityIncident` | [SIGNL4](solutions/signl4.md) | [Derdack SIGNL4](connectors/derdacksignl4.md) | | `SecurityScorecardFactor_CL` | [SecurityScorecard Cybersecurity Ratings](solutions/securityscorecard-cybersecurity-ratings.md) | [SecurityScorecard Factor](connectors/securityscorecardfactorazurefunctions.md) | | `SecurityScorecardIssues_CL` | [SecurityScorecard Cybersecurity 
Ratings](solutions/securityscorecard-cybersecurity-ratings.md) | [SecurityScorecard Issue](connectors/securityscorecardissueazurefunctions.md) | | `SecurityScorecardRatings_CL` | [SecurityScorecard Cybersecurity Ratings](solutions/securityscorecard-cybersecurity-ratings.md) | [SecurityScorecard Cybersecurity Ratings](connectors/securityscorecardratingsazurefunctions.md) | @@ -784,11 +729,7 @@ This page lists **811 unique tables** ingested by connectors. | `SentinelOneThreats_CL` | [SentinelOne](solutions/sentinelone.md) | [SentinelOne](connectors/sentineloneccp.md) | | `SentinelOne_CL` | [SentinelOne](solutions/sentinelone.md) | [SentinelOne](connectors/sentinelone.md) | | `SeraphicWebSecurity_CL` | [SeraphicSecurity](solutions/seraphicsecurity.md) | [Seraphic Web Security](connectors/seraphicwebsecurity.md) | -| `ServicePrincipalRiskEvents` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | -| `ServicePrincipalSignInLogs` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | | `Sevco_Devices_CL` | [SevcoSecurity](solutions/sevcosecurity.md) | [Sevco Platform - Devices](connectors/sevcodevices.md) | -| `SignInLogs` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | -| `SigninLogs` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | | `SlackAuditNativePoller_CL` | [SlackAudit](solutions/slackaudit.md) | [Slack](connectors/slackaudit.md) | | `SlackAuditV2_CL` | [SlackAudit](solutions/slackaudit.md) | [SlackAudit (via Codeless Connector Framework)](connectors/slackauditlogsccpdefinition.md) | | `SlackAudit_CL` | [SlackAudit](solutions/slackaudit.md) | [[DEPRECATED] Slack Audit](connectors/slackauditapi.md) | @@ -809,15 +750,9 @@ This page lists **811 unique tables** ingested by connectors. 
| `SophosEPEvents_CL` | [Sophos Endpoint Protection](solutions/sophos-endpoint-protection.md) | [Sophos Endpoint Protection (using REST API)](connectors/sophosendpointprotectionccpdefinition.md) | | `SophosEP_CL` | [Sophos Endpoint Protection](solutions/sophos-endpoint-protection.md) | [Sophos Endpoint Protection](connectors/sophosep.md) | | `SquidProxy_CL` | [SquidProxy](solutions/squidproxy.md) | [[Deprecated] Squid Proxy](connectors/squidproxy.md) | -| `StorageBlobLogs` | [Azure Storage](solutions/azure-storage.md) | [Azure Storage Account](connectors/azurestorageaccount.md) | -| `StorageFileLogs` | [Azure Storage](solutions/azure-storage.md) | [Azure Storage Account](connectors/azurestorageaccount.md) | -| `StorageQueueLogs` | [Azure Storage](solutions/azure-storage.md) | [Azure Storage Account](connectors/azurestorageaccount.md) | -| `StorageTableLogs` | [Azure Storage](solutions/azure-storage.md) | [Azure Storage Account](connectors/azurestorageaccount.md) | | `StyxViewAlerts_CL` | [Styx Intelligence](solutions/styx-intelligence.md) | [StyxView Alerts (via Codeless Connector Platform)](connectors/styxviewendpointconnectordefinition.md) | -| `SymantecICDx_CL` | [Symantec Integrated Cyber Defense](solutions/symantec-integrated-cyber-defense.md) | [Symantec Integrated Cyber Defense Exchange](connectors/symantec.md) | -| `Syslog` | [Barracuda CloudGen Firewall](solutions/barracuda-cloudgen-firewall.md), [Blackberry CylancePROTECT](solutions/blackberry-cylanceprotect.md), [CTERA](solutions/ctera.md) +31 more | [[Deprecated] Barracuda CloudGen Firewall](connectors/barracudacloudfirewall.md), [[Deprecated] Blackberry CylancePROTECT](connectors/blackberrycylanceprotect.md), [CTERA Syslog](connectors/ctera.md), [[Deprecated] Cisco Application Centric Infrastructure](connectors/ciscoaci.md), [[Deprecated] Cisco Identity Services Engine](connectors/ciscoise.md) +30 more | +| [`Syslog`](tables/syslog.md) | [Barracuda CloudGen 
Firewall](solutions/barracuda-cloudgen-firewall.md), [CTERA](solutions/ctera.md), [Cisco ACI](solutions/cisco-aci.md) [+30 more](tables/syslog.md) | [[Deprecated] Barracuda CloudGen Firewall](connectors/barracudacloudfirewall.md), [CTERA Syslog](connectors/ctera.md), [[Deprecated] Cisco Application Centric Infrastructure](connectors/ciscoaci.md), [[Deprecated] Cisco Identity Services Engine](connectors/ciscoise.md), [Cisco Software Defined WAN](connectors/ciscosdwan.md) [+29 more](tables/syslog.md) | | `secRMM_CL` | [Squadra Technologies SecRmm](solutions/squadra-technologies-secrmm.md) | [Squadra Technologies secRMM](connectors/squadratechnologiessecrmm.md) | -| `sharePoint` | [Microsoft 365](solutions/microsoft-365.md) | [Microsoft 365 (formerly, Office 365)](connectors/office365.md) | | `signIns` | [Okta Single Sign-On](solutions/okta-single-sign-on.md) | [Okta Single Sign-On (Preview)](connectors/oktassov2.md) | ## T @@ -837,11 +772,10 @@ This page lists **811 unique tables** ingested by connectors. 
| `TheHive_CL` | [TheHive](solutions/thehive.md) | [TheHive Project - TheHive](connectors/thehiveprojectthehive.md) | | `TheomAlerts_CL` | [Theom](solutions/theom.md) | [Theom](connectors/theom.md) | | `ThreatIntelExportOperation` | [Threat Intelligence (NEW)](solutions/threat-intelligence-(new).md) | [Threat intelligence - TAXII Export (Preview)](connectors/threatintelligencetaxiiexport.md) | -| `ThreatIntelIndicators` | [Lumen Defender Threat Feed](solutions/lumen-defender-threat-feed.md), [Threat Intelligence (NEW)](solutions/threat-intelligence-(new).md) | [Lumen Defender Threat Feed Data Connector](connectors/lumenthreatfeedconnector.md), [Microsoft Defender Threat Intelligence](connectors/microsoftdefenderthreatintelligence.md), [Premium Microsoft Defender Threat Intelligence](connectors/premiummicrosoftdefenderforthreatintelligence.md), [Threat Intelligence Platforms](connectors/threatintelligence.md), [Threat intelligence - TAXII](connectors/threatintelligencetaxii.md) +1 more | -| `ThreatIntelObjects` | [Threat Intelligence (NEW)](solutions/threat-intelligence-(new).md) | [Microsoft Defender Threat Intelligence](connectors/microsoftdefenderthreatintelligence.md), [Premium Microsoft Defender Threat Intelligence](connectors/premiummicrosoftdefenderforthreatintelligence.md), [Threat Intelligence Platforms](connectors/threatintelligence.md), [Threat intelligence - TAXII](connectors/threatintelligencetaxii.md), [Threat Intelligence Upload API (Preview)](connectors/threatintelligenceuploadindicatorsapi.md) | -| `ThreatIntelligenceIndicator` | [CofenseIntelligence](solutions/cofenseintelligence.md), [CofenseTriage](solutions/cofensetriage.md), [CognyteLuminar](solutions/cognyteluminar.md) +7 more | [Cofense Intelligence Threat Indicators Ingestion](connectors/cofenseintelligence.md), [Cofense Triage Threat Indicators Ingestion](connectors/cofensetriage.md), [Luminar IOCs and Leaked Credentials](connectors/cognyteluminar.md), [CrowdStrike Falcon Adversary 
Intelligence ](connectors/crowdstrikefalconadversaryintelligence.md), [Datalake2Sentinel](connectors/datalake2sentinelconnector.md) +9 more | +| [`ThreatIntelIndicators`](tables/threatintelindicators.md) | [Lumen Defender Threat Feed](solutions/lumen-defender-threat-feed.md), [Threat Intelligence (NEW)](solutions/threat-intelligence-(new).md) | [Lumen Defender Threat Feed Data Connector](connectors/lumenthreatfeedconnector.md), [Microsoft Defender Threat Intelligence](connectors/microsoftdefenderthreatintelligence.md), [Premium Microsoft Defender Threat Intelligence](connectors/premiummicrosoftdefenderforthreatintelligence.md), [Threat Intelligence Platforms](connectors/threatintelligence.md), [Threat intelligence - TAXII](connectors/threatintelligencetaxii.md) [+1 more](tables/threatintelindicators.md) | +| [`ThreatIntelObjects`](tables/threatintelobjects.md) | [Threat Intelligence (NEW)](solutions/threat-intelligence-(new).md) | [Microsoft Defender Threat Intelligence](connectors/microsoftdefenderthreatintelligence.md), [Premium Microsoft Defender Threat Intelligence](connectors/premiummicrosoftdefenderforthreatintelligence.md), [Threat Intelligence Platforms](connectors/threatintelligence.md), [Threat intelligence - TAXII](connectors/threatintelligencetaxii.md), [Threat Intelligence Upload API (Preview)](connectors/threatintelligenceuploadindicatorsapi.md) | +| [`ThreatIntelligenceIndicator`](tables/threatintelligenceindicator.md) | [CofenseIntelligence](solutions/cofenseintelligence.md), [CofenseTriage](solutions/cofensetriage.md), [CognyteLuminar](solutions/cognyteluminar.md) [+7 more](tables/threatintelligenceindicator.md) | [Cofense Intelligence Threat Indicators Ingestion](connectors/cofenseintelligence.md), [Cofense Triage Threat Indicators Ingestion](connectors/cofensetriage.md), [Luminar IOCs and Leaked Credentials](connectors/cognyteluminar.md), [CrowdStrike Falcon Adversary Intelligence ](connectors/crowdstrikefalconadversaryintelligence.md), 
[Datalake2Sentinel](connectors/datalake2sentinelconnector.md) [+9 more](tables/threatintelligenceindicator.md) | | `Tomcat_CL` | [Tomcat](solutions/tomcat.md) | [[Deprecated] Apache Tomcat](connectors/apachetomcat.md) | -| `TransmitSecurityActivity_CL` | [TransmitSecurity](solutions/transmitsecurity.md) | [Transmit Security Connector](connectors/transmitsecurity.md) | | `TrendMicroCAS_CL` | [Trend Micro Cloud App Security](solutions/trend-micro-cloud-app-security.md) | [Trend Micro Cloud App Security](connectors/trendmicrocas.md) | | `TrendMicro_XDR_OAT_CL` | [Trend Micro Vision One](solutions/trend-micro-vision-one.md) | [Trend Vision One](connectors/trendmicroxdr.md) | | `TrendMicro_XDR_RCA_Result_CL` | [Trend Micro Vision One](solutions/trend-micro-vision-one.md) | [Trend Vision One](connectors/trendmicroxdr.md) | @@ -850,15 +784,12 @@ This page lists **811 unique tables** ingested by connectors. | `Ttp_Attachment_CL` | [Mimecast](solutions/mimecast.md) | [Mimecast Targeted Threat Protection](connectors/mimecastttpapi.md) | | `Ttp_Impersonation_CL` | [Mimecast](solutions/mimecast.md) | [Mimecast Targeted Threat Protection](connectors/mimecastttpapi.md) | | `Ttp_Url_CL` | [Mimecast](solutions/mimecast.md) | [Mimecast Targeted Threat Protection](connectors/mimecastttpapi.md) | -| `teams` | [Microsoft 365](solutions/microsoft-365.md) | [Microsoft 365 (formerly, Office 365)](connectors/office365.md) | ## U | Table | Solutions | Connectors | |-------|-----------|------------| | `Ubiquiti_CL` | [Ubiquiti UniFi](solutions/ubiquiti-unifi.md) | [[Deprecated] Ubiquiti UniFi](connectors/ubiquitiunifi.md) | -| `UrlClickEvents` | [Microsoft Defender XDR](solutions/microsoft-defender-xdr.md) | [Microsoft Defender XDR](connectors/microsoftthreatprotection.md) | -| `UserRiskEvents` | [Microsoft Entra ID](solutions/microsoft-entra-id.md) | [Microsoft Entra ID](connectors/azureactivedirectory.md) | ## V @@ -903,9 +834,7 @@ This page lists **811 unique tables** ingested by 
connectors. | Table | Solutions | Connectors | |-------|-----------|------------| -| `W3CIISLog` | [Microsoft Exchange Security - Exchange On-Premises](solutions/microsoft-exchange-security---exchange-on-premises.md) | [[Deprecated] Microsoft Exchange Logs and Events](connectors/esi-exchangeadminauditlogevents.md), [IIS Logs of Microsoft Exchange Servers](connectors/esi-opt5exchangeiislogs.md) | -| `WindowsEvent` | [Windows Forwarded Events](solutions/windows-forwarded-events.md) | [Windows Forwarded Events](connectors/windowsforwardedevents.md) | -| `WindowsFirewall` | [Windows Firewall](solutions/windows-firewall.md) | [Windows Firewall](connectors/windowsfirewall.md) | +| [`W3CIISLog`](tables/w3ciislog.md) | [Microsoft Exchange Security - Exchange On-Premises](solutions/microsoft-exchange-security---exchange-on-premises.md) | [[Deprecated] Microsoft Exchange Logs and Events](connectors/esi-exchangeadminauditlogevents.md), [IIS Logs of Microsoft Exchange Servers](connectors/esi-opt5exchangeiislogs.md) | | `WizAuditLogsV2_CL` | [Wiz](solutions/wiz.md) | [Wiz](connectors/wiz.md) | | `WizAuditLogs_CL` | [Wiz](solutions/wiz.md) | [Wiz](connectors/wiz.md) | | `WizIssuesV2_CL` | [Wiz](solutions/wiz.md) | [Wiz](connectors/wiz.md) | diff --git a/Tools/Solutions Analyzer/connector-docs/tables/abapauditlog.md b/Tools/Solutions Analyzer/connector-docs/tables/abapauditlog.md new file mode 100644 index 00000000000..d7eef66e24b --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/abapauditlog.md @@ -0,0 +1,33 @@ +# ABAPAuditLog + +**Table:** `ABAPAuditLog` + +This table is ingested by **3 solution(s)** using **3 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (3) + +This table is used by the following solutions: + +- [Pathlock_TDnR](../solutions/pathlock-tdnr.md) +- [SAP S4 Cloud Public Edition](../solutions/sap-s4-cloud-public-edition.md) +- [SecurityBridge App](../solutions/securitybridge-app.md) + +## Connectors (3) + +This table is ingested by the following connectors: + +- [Pathlock Inc.: Threat Detection and Response for SAP](../connectors/pathlock-tdnr.md) +- [SAP S/4HANA Cloud Public Edition](../connectors/saps4publicalerts.md) +- [SecurityBridge Solution for SAP](../connectors/securitybridge.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/armis-activities-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/armis-activities-cl.md new file mode 100644 index 00000000000..bc9936df2b6 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/armis-activities-cl.md @@ -0,0 +1,30 @@ +# Armis_Activities_CL + +**Table:** `Armis_Activities_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [Armis](../solutions/armis.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Armis Activities](../connectors/armisactivities.md) +- [Armis Alerts Activities](../connectors/armisalertsactivities.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/armis-alerts-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/armis-alerts-cl.md new file mode 100644 index 00000000000..0fdd6ab00c6 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/armis-alerts-cl.md @@ -0,0 +1,30 @@ +# Armis_Alerts_CL + +**Table:** `Armis_Alerts_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. + +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [Armis](../solutions/armis.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Armis Alerts](../connectors/armisalerts.md) +- [Armis Alerts Activities](../connectors/armisalertsactivities.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/asimauditeventlogs.md b/Tools/Solutions Analyzer/connector-docs/tables/asimauditeventlogs.md new file mode 100644 index 00000000000..0872122a7d9 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/asimauditeventlogs.md @@ -0,0 +1,33 @@ +# ASimAuditEventLogs + +**Table:** `ASimAuditEventLogs` + +This table is ingested by **3 solution(s)** using **3 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (3) + +This table is used by the following solutions: + +- [Cisco Meraki Events via REST API](../solutions/cisco-meraki-events-via-rest-api.md) +- [CrowdStrike Falcon Endpoint Protection](../solutions/crowdstrike-falcon-endpoint-protection.md) +- [Workday](../solutions/workday.md) + +## Connectors (3) + +This table is ingested by the following connectors: + +- [Cisco Meraki (using REST API)](../connectors/ciscomerakimultirule.md) +- [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](../connectors/crowdstrikereplicatorv2.md) +- [Workday User Activity](../connectors/workdayccpdefinition.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/asimauthenticationeventlogs.md b/Tools/Solutions Analyzer/connector-docs/tables/asimauthenticationeventlogs.md new file mode 100644 index 00000000000..4205f3db94d --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/asimauthenticationeventlogs.md @@ -0,0 +1,31 @@ +# ASimAuthenticationEventLogs + +**Table:** `ASimAuthenticationEventLogs` + +This table is ingested by **2 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [CrowdStrike Falcon Endpoint Protection](../solutions/crowdstrike-falcon-endpoint-protection.md) +- [VMware Carbon Black Cloud](../solutions/vmware-carbon-black-cloud.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](../connectors/crowdstrikereplicatorv2.md) +- [VMware Carbon Black Cloud via AWS S3](../connectors/carbonblackawss3.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/asimdnsactivitylogs.md b/Tools/Solutions Analyzer/connector-docs/tables/asimdnsactivitylogs.md new file mode 100644 index 00000000000..5db9561e6fe --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/asimdnsactivitylogs.md @@ -0,0 +1,31 @@ +# ASimDnsActivityLogs + +**Table:** `ASimDnsActivityLogs` + +This table is ingested by **2 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [CrowdStrike Falcon Endpoint Protection](../solutions/crowdstrike-falcon-endpoint-protection.md) +- [Windows Server DNS](../solutions/windows-server-dns.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Windows DNS Events via AMA](../connectors/asimdnsactivitylogs.md) +- [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](../connectors/crowdstrikereplicatorv2.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/asimfileeventlogs.md b/Tools/Solutions Analyzer/connector-docs/tables/asimfileeventlogs.md new file mode 100644 index 00000000000..5bcb22772a1 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/asimfileeventlogs.md @@ -0,0 +1,31 @@ +# ASimFileEventLogs + +**Table:** `ASimFileEventLogs` + +This table is ingested by **2 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [CrowdStrike Falcon Endpoint Protection](../solutions/crowdstrike-falcon-endpoint-protection.md) +- [VMware Carbon Black Cloud](../solutions/vmware-carbon-black-cloud.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](../connectors/crowdstrikereplicatorv2.md) +- [VMware Carbon Black Cloud via AWS S3](../connectors/carbonblackawss3.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/asimnetworksessionlogs.md b/Tools/Solutions Analyzer/connector-docs/tables/asimnetworksessionlogs.md new file mode 100644 index 00000000000..31396f3c798 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/asimnetworksessionlogs.md @@ -0,0 +1,33 @@ +# ASimNetworkSessionLogs + +**Table:** `ASimNetworkSessionLogs` + +This table is ingested by **3 solution(s)** using **3 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (3) + +This table is used by the following solutions: + +- [Cisco Meraki Events via REST API](../solutions/cisco-meraki-events-via-rest-api.md) +- [CrowdStrike Falcon Endpoint Protection](../solutions/crowdstrike-falcon-endpoint-protection.md) +- [VMware Carbon Black Cloud](../solutions/vmware-carbon-black-cloud.md) + +## Connectors (3) + +This table is ingested by the following connectors: + +- [Cisco Meraki (using REST API)](../connectors/ciscomerakimultirule.md) +- [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](../connectors/crowdstrikereplicatorv2.md) +- [VMware Carbon Black Cloud via AWS S3](../connectors/carbonblackawss3.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/asimprocesseventlogs.md b/Tools/Solutions Analyzer/connector-docs/tables/asimprocesseventlogs.md new file mode 100644 index 00000000000..c167f4b51a8 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/asimprocesseventlogs.md @@ -0,0 +1,31 @@ +# ASimProcessEventLogs + +**Table:** `ASimProcessEventLogs` + +This table is ingested by **2 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [CrowdStrike Falcon Endpoint Protection](../solutions/crowdstrike-falcon-endpoint-protection.md) +- [VMware Carbon Black Cloud](../solutions/vmware-carbon-black-cloud.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](../connectors/crowdstrikereplicatorv2.md) +- [VMware Carbon Black Cloud via AWS S3](../connectors/carbonblackawss3.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/asimregistryeventlogs.md b/Tools/Solutions Analyzer/connector-docs/tables/asimregistryeventlogs.md new file mode 100644 index 00000000000..94eaf087336 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/asimregistryeventlogs.md @@ -0,0 +1,31 @@ +# ASimRegistryEventLogs + +**Table:** `ASimRegistryEventLogs` + +This table is ingested by **2 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [CrowdStrike Falcon Endpoint Protection](../solutions/crowdstrike-falcon-endpoint-protection.md) +- [VMware Carbon Black Cloud](../solutions/vmware-carbon-black-cloud.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](../connectors/crowdstrikereplicatorv2.md) +- [VMware Carbon Black Cloud via AWS S3](../connectors/carbonblackawss3.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/awscloudtrail.md b/Tools/Solutions Analyzer/connector-docs/tables/awscloudtrail.md new file mode 100644 index 00000000000..33ca2a10dd1 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/awscloudtrail.md @@ -0,0 +1,30 @@ +# AWSCloudTrail + +**Table:** `AWSCloudTrail` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [Amazon Web Services](../solutions/amazon-web-services.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Amazon Web Services](../connectors/aws.md) +- [Amazon Web Services S3](../connectors/awss3.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/awsvpcflow.md b/Tools/Solutions Analyzer/connector-docs/tables/awsvpcflow.md new file mode 100644 index 00000000000..e39d326c752 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/awsvpcflow.md @@ -0,0 +1,31 @@ +# AWSVPCFlow + +**Table:** `AWSVPCFlow` + +This table is ingested by **2 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. + +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [AWS VPC Flow Logs](../solutions/aws-vpc-flow-logs.md) +- [Amazon Web Services](../solutions/amazon-web-services.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Amazon Web Services S3 VPC Flow Logs](../connectors/awss3vpcflowlogsparquetdefinition.md) +- [Amazon Web Services S3](../connectors/awss3.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/azurediagnostics.md b/Tools/Solutions Analyzer/connector-docs/tables/azurediagnostics.md new file mode 100644 index 00000000000..af6f26bcdf2 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/azurediagnostics.md @@ -0,0 +1,55 @@ +# AzureDiagnostics + +**Table:** `AzureDiagnostics` + +This table is ingested by **14 solution(s)** using **14 connector(s)**. 
+ +⚠️ **Note:** This table name is unique to specific connectors. + +--- + +## Solutions (14) + +This table is used by the following solutions: + +- [Azure Batch Account](../solutions/azure-batch-account.md) +- [Azure Cognitive Search](../solutions/azure-cognitive-search.md) +- [Azure DDoS Protection](../solutions/azure-ddos-protection.md) +- [Azure Data Lake Storage Gen1](../solutions/azure-data-lake-storage-gen1.md) +- [Azure Event Hubs](../solutions/azure-event-hubs.md) +- [Azure Firewall](../solutions/azure-firewall.md) +- [Azure Key Vault](../solutions/azure-key-vault.md) +- [Azure Logic Apps](../solutions/azure-logic-apps.md) +- [Azure Network Security Groups](../solutions/azure-network-security-groups.md) +- [Azure SQL Database solution for sentinel](../solutions/azure-sql-database-solution-for-sentinel.md) +- [Azure Service Bus](../solutions/azure-service-bus.md) +- [Azure Stream Analytics](../solutions/azure-stream-analytics.md) +- [Azure Web Application Firewall (WAF)](../solutions/azure-web-application-firewall-(waf).md) +- [Azure kubernetes Service](../solutions/azure-kubernetes-service.md) + +## Connectors (14) + +This table is ingested by the following connectors: + +- [Azure Batch Account](../connectors/azurebatchaccount-ccp.md) +- [Azure Cognitive Search](../connectors/azurecognitivesearch-ccp.md) +- [Azure Data Lake Storage Gen1](../connectors/azuredatalakestoragegen1-ccp.md) +- [Azure Event Hub](../connectors/azureeventhub-ccp.md) +- [Azure Firewall](../connectors/azurefirewall.md) +- [Azure Key Vault](../connectors/azurekeyvault.md) +- [Azure Kubernetes Service (AKS)](../connectors/azurekubernetes.md) +- [Azure Logic Apps](../connectors/azurelogicapps-ccp.md) +- [Network Security Groups](../connectors/azurensg.md) +- [Azure Service Bus](../connectors/azureservicebus-ccp.md) +- [Azure SQL Databases](../connectors/azuresql.md) +- [Azure Stream Analytics](../connectors/azurestreamanalytics-ccp.md) +- [Azure DDoS Protection](../connectors/ddos.md) +- 
[Azure Web Application Firewall (WAF)](../connectors/waf.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/boxevents-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/boxevents-cl.md new file mode 100644 index 00000000000..908124690f0 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/boxevents-cl.md @@ -0,0 +1,30 @@ +# BoxEvents_CL + +**Table:** `BoxEvents_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. + +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [Box](../solutions/box.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Box](../connectors/boxdataconnector.md) +- [Box Events (CCP)](../connectors/boxeventsccpdefinition.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-audit-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-audit-cl.md new file mode 100644 index 00000000000..4e44d3e4acb --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-audit-cl.md @@ -0,0 +1,30 @@ +# Cisco_Umbrella_audit_CL + +**Table:** `Cisco_Umbrella_audit_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CiscoUmbrella](../solutions/ciscoumbrella.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cisco Cloud Security](../connectors/ciscoumbrelladataconnector.md) +- [Cisco Cloud Security (using elastic premium plan)](../connectors/ciscoumbrelladataconnectorelasticpremium.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-cloudfirewall-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-cloudfirewall-cl.md new file mode 100644 index 00000000000..922578a5a03 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-cloudfirewall-cl.md @@ -0,0 +1,30 @@ +# Cisco_Umbrella_cloudfirewall_CL + +**Table:** `Cisco_Umbrella_cloudfirewall_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CiscoUmbrella](../solutions/ciscoumbrella.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cisco Cloud Security](../connectors/ciscoumbrelladataconnector.md) +- [Cisco Cloud Security (using elastic premium plan)](../connectors/ciscoumbrelladataconnectorelasticpremium.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-dlp-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-dlp-cl.md new file mode 100644 index 00000000000..b8cc1b07a40 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-dlp-cl.md @@ -0,0 +1,30 @@ +# Cisco_Umbrella_dlp_CL + +**Table:** `Cisco_Umbrella_dlp_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CiscoUmbrella](../solutions/ciscoumbrella.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cisco Cloud Security](../connectors/ciscoumbrelladataconnector.md) +- [Cisco Cloud Security (using elastic premium plan)](../connectors/ciscoumbrelladataconnectorelasticpremium.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-dns-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-dns-cl.md new file mode 100644 index 00000000000..de7eff380ea --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-dns-cl.md @@ -0,0 +1,30 @@ +# Cisco_Umbrella_dns_CL + +**Table:** `Cisco_Umbrella_dns_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CiscoUmbrella](../solutions/ciscoumbrella.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cisco Cloud Security](../connectors/ciscoumbrelladataconnector.md) +- [Cisco Cloud Security (using elastic premium plan)](../connectors/ciscoumbrelladataconnectorelasticpremium.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-fileevent-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-fileevent-cl.md new file mode 100644 index 00000000000..2b1989c1a7d --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-fileevent-cl.md @@ -0,0 +1,30 @@ +# Cisco_Umbrella_fileevent_CL + +**Table:** `Cisco_Umbrella_fileevent_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CiscoUmbrella](../solutions/ciscoumbrella.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cisco Cloud Security](../connectors/ciscoumbrelladataconnector.md) +- [Cisco Cloud Security (using elastic premium plan)](../connectors/ciscoumbrelladataconnectorelasticpremium.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-firewall-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-firewall-cl.md new file mode 100644 index 00000000000..0091ac9dd59 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-firewall-cl.md @@ -0,0 +1,30 @@ +# Cisco_Umbrella_firewall_CL + +**Table:** `Cisco_Umbrella_firewall_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CiscoUmbrella](../solutions/ciscoumbrella.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cisco Cloud Security](../connectors/ciscoumbrelladataconnector.md) +- [Cisco Cloud Security (using elastic premium plan)](../connectors/ciscoumbrelladataconnectorelasticpremium.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-intrusion-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-intrusion-cl.md new file mode 100644 index 00000000000..56472ab25e9 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-intrusion-cl.md @@ -0,0 +1,30 @@ +# Cisco_Umbrella_intrusion_CL + +**Table:** `Cisco_Umbrella_intrusion_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CiscoUmbrella](../solutions/ciscoumbrella.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cisco Cloud Security](../connectors/ciscoumbrelladataconnector.md) +- [Cisco Cloud Security (using elastic premium plan)](../connectors/ciscoumbrelladataconnectorelasticpremium.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-ip-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-ip-cl.md new file mode 100644 index 00000000000..97c1698f870 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-ip-cl.md @@ -0,0 +1,30 @@ +# Cisco_Umbrella_ip_CL + +**Table:** `Cisco_Umbrella_ip_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CiscoUmbrella](../solutions/ciscoumbrella.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cisco Cloud Security](../connectors/ciscoumbrelladataconnector.md) +- [Cisco Cloud Security (using elastic premium plan)](../connectors/ciscoumbrelladataconnectorelasticpremium.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-proxy-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-proxy-cl.md new file mode 100644 index 00000000000..3432728b01c --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-proxy-cl.md @@ -0,0 +1,30 @@ +# Cisco_Umbrella_proxy_CL + +**Table:** `Cisco_Umbrella_proxy_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CiscoUmbrella](../solutions/ciscoumbrella.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cisco Cloud Security](../connectors/ciscoumbrelladataconnector.md) +- [Cisco Cloud Security (using elastic premium plan)](../connectors/ciscoumbrelladataconnectorelasticpremium.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-ravpnlogs-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-ravpnlogs-cl.md new file mode 100644 index 00000000000..bc7213bc39d --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-ravpnlogs-cl.md @@ -0,0 +1,30 @@ +# Cisco_Umbrella_ravpnlogs_CL + +**Table:** `Cisco_Umbrella_ravpnlogs_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CiscoUmbrella](../solutions/ciscoumbrella.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cisco Cloud Security](../connectors/ciscoumbrelladataconnector.md) +- [Cisco Cloud Security (using elastic premium plan)](../connectors/ciscoumbrelladataconnectorelasticpremium.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-ztaflow-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-ztaflow-cl.md new file mode 100644 index 00000000000..349f4130f7c --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-ztaflow-cl.md @@ -0,0 +1,30 @@ +# Cisco_Umbrella_ztaflow_CL + +**Table:** `Cisco_Umbrella_ztaflow_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CiscoUmbrella](../solutions/ciscoumbrella.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cisco Cloud Security](../connectors/ciscoumbrelladataconnector.md) +- [Cisco Cloud Security (using elastic premium plan)](../connectors/ciscoumbrelladataconnectorelasticpremium.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-ztna-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-ztna-cl.md new file mode 100644 index 00000000000..3658ab6a864 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cisco-umbrella-ztna-cl.md @@ -0,0 +1,30 @@ +# Cisco_Umbrella_ztna_CL + +**Table:** `Cisco_Umbrella_ztna_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CiscoUmbrella](../solutions/ciscoumbrella.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cisco Cloud Security](../connectors/ciscoumbrelladataconnector.md) +- [Cisco Cloud Security (using elastic premium plan)](../connectors/ciscoumbrelladataconnectorelasticpremium.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cloudflarev2-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cloudflarev2-cl.md new file mode 100644 index 00000000000..61e0cc5f09b --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cloudflarev2-cl.md @@ -0,0 +1,30 @@ +# CloudflareV2_CL + +**Table:** `CloudflareV2_CL` + +This table is ingested by **2 solution(s)** using **1 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [Cloudflare](../solutions/cloudflare.md) +- [Cloudflare CCF](../solutions/cloudflare-ccf.md) + +## Connectors (1) + +This table is ingested by the following connectors: + +- [Cloudflare (Using Blob Container) (via Codeless Connector Framework)](../connectors/cloudflaredefinition.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/commonsecuritylog.md b/Tools/Solutions Analyzer/connector-docs/tables/commonsecuritylog.md new file mode 100644 index 00000000000..00590d0b9c0 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/commonsecuritylog.md @@ -0,0 +1,172 @@ +# CommonSecurityLog + +**Table:** `CommonSecurityLog` + +This table is ingested by **54 solution(s)** using **91 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (54) + +This table is used by the following solutions: + +- [AI Analyst Darktrace](../solutions/ai-analyst-darktrace.md) +- [Akamai Security Events](../solutions/akamai-security-events.md) +- [AristaAwakeSecurity](../solutions/aristaawakesecurity.md) +- [Aruba ClearPass](../solutions/aruba-clearpass.md) +- [Barracuda WAF](../solutions/barracuda-waf.md) +- [Broadcom SymantecDLP](../solutions/broadcom-symantecdlp.md) +- [Cisco Firepower EStreamer](../solutions/cisco-firepower-estreamer.md) +- [CiscoSEG](../solutions/ciscoseg.md) +- [Citrix Web App Firewall](../solutions/citrix-web-app-firewall.md) +- [Claroty](../solutions/claroty.md) +- [Claroty xDome](../solutions/claroty-xdome.md) +- [Contrast Protect](../solutions/contrast-protect.md) +- [CrowdStrike Falcon Endpoint Protection](../solutions/crowdstrike-falcon-endpoint-protection.md) +- [CyberArk Enterprise Password Vault (EPV) Events](../solutions/cyberark-enterprise-password-vault-(epv)-events.md) +- [Delinea Secret Server](../solutions/delinea-secret-server.md) +- [ExtraHop Reveal(x)](../solutions/extrahop-reveal(x).md) +- [F5 Networks](../solutions/f5-networks.md) +- [FireEye Network Security](../solutions/fireeye-network-security.md) +- [Forcepoint CASB](../solutions/forcepoint-casb.md) +- [Forcepoint CSG](../solutions/forcepoint-csg.md) +- [Forcepoint NGFW](../solutions/forcepoint-ngfw.md) +- [ForgeRock Common Audit for CEF](../solutions/forgerock-common-audit-for-cef.md) +- [Fortinet FortiGate Next-Generation Firewall connector for Microsoft Sentinel](../solutions/fortinet-fortigate-next-generation-firewall-connector-for-microsoft-sentinel.md) +- [Fortinet FortiWeb Cloud WAF-as-a-Service connector for Microsoft Sentinel](../solutions/fortinet-fortiweb-cloud-waf-as-a-service-connector-for-microsoft-sentinel.md) +- [Illumio Core](../solutions/illumio-core.md) +- [Illusive Platform](../solutions/illusive-platform.md) +- [Imperva WAF Gateway](../solutions/imperva-waf-gateway.md) +- 
[Infoblox](../solutions/infoblox.md) +- [Infoblox Cloud Data Connector](../solutions/infoblox-cloud-data-connector.md) +- [Infoblox SOC Insights](../solutions/infoblox-soc-insights.md) +- [Netwrix Auditor](../solutions/netwrix-auditor.md) +- [NozomiNetworks](../solutions/nozominetworks.md) +- [OSSEC](../solutions/ossec.md) +- [Onapsis Platform](../solutions/onapsis-platform.md) +- [Palo Alto - XDR (Cortex)](../solutions/palo-alto---xdr-(cortex).md) +- [PaloAlto-PAN-OS](../solutions/paloalto-pan-os.md) +- [PaloAltoCDL](../solutions/paloaltocdl.md) +- [PingFederate](../solutions/pingfederate.md) +- [Radiflow](../solutions/radiflow.md) +- [RidgeSecurity](../solutions/ridgesecurity.md) +- [Silverfort](../solutions/silverfort.md) +- [SonicWall Firewall](../solutions/sonicwall-firewall.md) +- [Threat Intelligence](../solutions/threat-intelligence.md) +- [Threat Intelligence (NEW)](../solutions/threat-intelligence-(new).md) +- [Trend Micro Apex One](../solutions/trend-micro-apex-one.md) +- [Trend Micro Deep Security](../solutions/trend-micro-deep-security.md) +- [Trend Micro TippingPoint](../solutions/trend-micro-tippingpoint.md) +- [Vectra AI Detect](../solutions/vectra-ai-detect.md) +- [VirtualMetric DataStream](../solutions/virtualmetric-datastream.md) +- [Votiro](../solutions/votiro.md) +- [WireX Network Forensics Platform](../solutions/wirex-network-forensics-platform.md) +- [WithSecureElementsViaConnector](../solutions/withsecureelementsviaconnector.md) +- [iboss](../solutions/iboss.md) +- [vArmour Application Controller](../solutions/varmour-application-controller.md) + +## Connectors (91) + +This table is ingested by the following connectors: + +- [[Deprecated] Vectra AI Detect via Legacy Agent](../connectors/aivectradetect.md) +- [[Deprecated] Vectra AI Detect via AMA](../connectors/aivectradetectama.md) +- [[Deprecated] Akamai Security Events via Legacy Agent](../connectors/akamaisecurityevents.md) +- [[Deprecated] Akamai Security Events via 
AMA](../connectors/akamaisecurityeventsama.md) +- [[Deprecated] Awake Security via Legacy Agent](../connectors/aristaawakesecurity.md) +- [[Deprecated] Aruba ClearPass via Legacy Agent](../connectors/arubaclearpass.md) +- [[Deprecated] Aruba ClearPass via AMA](../connectors/arubaclearpassama.md) +- [[Deprecated] Barracuda Web Application Firewall via Legacy Agent](../connectors/barracuda.md) +- [[Deprecated] Broadcom Symantec DLP via Legacy Agent](../connectors/broadcomsymantecdlp.md) +- [[Deprecated] Broadcom Symantec DLP via AMA](../connectors/broadcomsymantecdlpama.md) +- [[Deprecated] Cisco Firepower eStreamer via Legacy Agent](../connectors/ciscofirepowerestreamer.md) +- [[Deprecated] Cisco Firepower eStreamer via AMA](../connectors/ciscofirepowerestreamerama.md) +- [[Deprecated] Cisco Secure Email Gateway via Legacy Agent](../connectors/ciscoseg.md) +- [[Deprecated] Cisco Secure Email Gateway via AMA](../connectors/ciscosegama.md) +- [[Deprecated] Citrix WAF (Web App Firewall) via Legacy Agent](../connectors/citrixwaf.md) +- [[Deprecated] Citrix WAF (Web App Firewall) via AMA](../connectors/citrixwafama.md) +- [[Deprecated] Claroty via Legacy Agent](../connectors/claroty.md) +- [[Deprecated] Claroty via AMA](../connectors/clarotyama.md) +- [Claroty xDome](../connectors/clarotyxdome.md) +- [[Deprecated] Contrast Protect via Legacy Agent](../connectors/contrastprotect.md) +- [[Deprecated] Contrast Protect via AMA](../connectors/contrastprotectama.md) +- [[Deprecated] CrowdStrike Falcon Endpoint Protection via Legacy Agent](../connectors/crowdstrikefalconendpointprotection.md) +- [[Deprecated] CrowdStrike Falcon Endpoint Protection via AMA](../connectors/crowdstrikefalconendpointprotectionama.md) +- [[Deprecated] CyberArk Enterprise Password Vault (EPV) Events via Legacy Agent](../connectors/cyberark.md) +- [[Deprecated] CyberArk Privilege Access Manager (PAM) Events via AMA](../connectors/cyberarkama.md) +- [[Deprecated] AI Analyst Darktrace via Legacy 
Agent](../connectors/darktrace.md) +- [[Deprecated] AI Analyst Darktrace via AMA](../connectors/darktraceama.md) +- [[Deprecated] Delinea Secret Server via AMA](../connectors/delineasecretserverama.md) +- [[Deprecated] Delinea Secret Server via Legacy Agent](../connectors/delineasecretserver-cef.md) +- [[Deprecated] ExtraHop Reveal(x) via Legacy Agent](../connectors/extrahopnetworks.md) +- [[Deprecated] ExtraHop Reveal(x) via AMA](../connectors/extrahopnetworksama.md) +- [[Deprecated] F5 Networks via Legacy Agent](../connectors/f5.md) +- [[Deprecated] F5 Networks via AMA](../connectors/f5ama.md) +- [[Deprecated] FireEye Network Security (NX) via Legacy Agent](../connectors/fireeyenx.md) +- [[Deprecated] FireEye Network Security (NX) via AMA](../connectors/fireeyenxama.md) +- [[Deprecated] Forcepoint CSG via Legacy Agent](../connectors/forcepointcsg.md) +- [[Deprecated] Forcepoint CSG via AMA](../connectors/forcepointcsgama.md) +- [[Deprecated] Forcepoint CASB via Legacy Agent](../connectors/forcepointcasb.md) +- [[Deprecated] Forcepoint CASB via AMA](../connectors/forcepointcasbama.md) +- [[Deprecated] Forcepoint NGFW via Legacy Agent](../connectors/forcepointngfw.md) +- [[Deprecated] Forcepoint NGFW via AMA](../connectors/forcepointngfwama.md) +- [[Deprecated] ForgeRock Identity Platform](../connectors/forgerock.md) +- [[Deprecated] Fortinet via Legacy Agent](../connectors/fortinet.md) +- [[Deprecated] Fortinet via AMA](../connectors/fortinetama.md) +- [[Deprecated] Fortinet FortiWeb Web Application Firewall via Legacy Agent](../connectors/fortinetfortiweb.md) +- [Fortinet FortiWeb Web Application Firewall via AMA](../connectors/fortinetfortiwebama.md) +- [[Deprecated] Illumio Core via Legacy Agent](../connectors/illumiocore.md) +- [[Deprecated] Illumio Core via AMA](../connectors/illumiocoreama.md) +- [Imperva WAF Gateway](../connectors/impervawafgateway.md) +- [[Deprecated] Infoblox Cloud Data Connector via Legacy 
Agent](../connectors/infobloxclouddataconnector.md) +- [[Deprecated] Infoblox Cloud Data Connector via AMA](../connectors/infobloxclouddataconnectorama.md) +- [[Deprecated] Infoblox SOC Insight Data Connector via AMA](../connectors/infobloxsocinsightsdataconnector-ama.md) +- [[Recommended] Infoblox SOC Insight Data Connector via AMA](../connectors/infobloxsocinsightsdataconnector-ama.md) +- [[Deprecated] Infoblox SOC Insight Data Connector via Legacy Agent](../connectors/infobloxsocinsightsdataconnector-legacy.md) +- [[Deprecated] Netwrix Auditor via Legacy Agent](../connectors/netwrix.md) +- [[Deprecated] Netwrix Auditor via AMA](../connectors/netwrixama.md) +- [[Deprecated] Nozomi Networks N2OS via Legacy Agent](../connectors/nozominetworksn2os.md) +- [[Deprecated] Nozomi Networks N2OS via AMA](../connectors/nozominetworksn2osama.md) +- [[Deprecated] OSSEC via Legacy Agent](../connectors/ossec.md) +- [[Deprecated] OSSEC via AMA](../connectors/ossecama.md) +- [[Deprecated] Onapsis Platform](../connectors/onapsisplatform.md) +- [[Deprecated] Palo Alto Networks Cortex Data Lake (CDL) via Legacy Agent](../connectors/paloaltocdl.md) +- [[Deprecated] Palo Alto Networks Cortex Data Lake (CDL) via AMA](../connectors/paloaltocdlama.md) +- [[Deprecated] Palo Alto Networks (Firewall) via Legacy Agent](../connectors/paloaltonetworks.md) +- [[Deprecated] Palo Alto Networks (Firewall) via AMA](../connectors/paloaltonetworksama.md) +- [Palo Alto Networks Cortex XDR](../connectors/paloaltonetworkscortex.md) +- [[Deprecated] PingFederate via Legacy Agent](../connectors/pingfederate.md) +- [[Deprecated] PingFederate via AMA](../connectors/pingfederateama.md) +- [Radiflow iSID via AMA](../connectors/radiflowisid.md) +- [[Deprecated] RIDGEBOT - data connector for Microsoft Sentinel](../connectors/ridgebotdataconnector.md) +- [Silverfort Admin Console](../connectors/silverfortama.md) +- [[Deprecated] SonicWall Firewall via Legacy Agent](../connectors/sonicwallfirewall.md) +- 
[[Deprecated] SonicWall Firewall via AMA](../connectors/sonicwallfirewallama.md) +- [Threat Intelligence Platforms](../connectors/threatintelligence.md) +- [[Deprecated] Trend Micro Deep Security via Legacy](../connectors/trendmicro.md) +- [[Deprecated] Trend Micro Apex One via Legacy Agent](../connectors/trendmicroapexone.md) +- [[Deprecated] Trend Micro Apex One via AMA](../connectors/trendmicroapexoneama.md) +- [[Deprecated] Trend Micro TippingPoint via Legacy](../connectors/trendmicrotippingpoint.md) +- [VirtualMetric Director Proxy](../connectors/virtualmetricdirectorproxy.md) +- [VirtualMetric DataStream for Microsoft Sentinel](../connectors/virtualmetricmssentinelconnector.md) +- [VirtualMetric DataStream for Microsoft Sentinel data lake](../connectors/virtualmetricmssentineldatalakeconnector.md) +- [[Deprecated] Votiro Sanitization Engine Logs](../connectors/votiro.md) +- [[Deprecated] WireX Network Forensics Platform via Legacy Agent](../connectors/wirex-systems-nfp.md) +- [[Deprecated] WireX Network Forensics Platform via AMA](../connectors/wirex-systems-nfpama.md) +- [[Deprecated] WithSecure Elements via Connector](../connectors/withsecureelementsviaconnector.md) +- [[Deprecated] iboss via Legacy Agent](../connectors/iboss.md) +- [iboss via AMA](../connectors/ibossama.md) +- [[Deprecated] Illusive Platform via Legacy Agent](../connectors/illusiveattackmanagementsystem.md) +- [[Deprecated] Illusive Platform via AMA](../connectors/illusiveattackmanagementsystemama.md) +- [[Deprecated] vArmour Application Controller via Legacy Agent](../connectors/varmourac.md) +- [[Deprecated] vArmour Application Controller via AMA](../connectors/varmouracama.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/crowdstrike-additional-events-cl.md b/Tools/Solutions 
Analyzer/connector-docs/tables/crowdstrike-additional-events-cl.md new file mode 100644 index 00000000000..1c394524675 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/crowdstrike-additional-events-cl.md @@ -0,0 +1,30 @@ +# CrowdStrike_Additional_Events_CL + +**Table:** `CrowdStrike_Additional_Events_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. + +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CrowdStrike Falcon Endpoint Protection](../solutions/crowdstrike-falcon-endpoint-protection.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)](../connectors/crowdstrikefalcons3ccpdefinition.md) +- [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](../connectors/crowdstrikereplicatorv2.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/crowdstrike-secondary-data-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/crowdstrike-secondary-data-cl.md new file mode 100644 index 00000000000..03af6e5e4cb --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/crowdstrike-secondary-data-cl.md @@ -0,0 +1,30 @@ +# CrowdStrike_Secondary_Data_CL + +**Table:** `CrowdStrike_Secondary_Data_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CrowdStrike Falcon Endpoint Protection](../solutions/crowdstrike-falcon-endpoint-protection.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)](../connectors/crowdstrikefalcons3ccpdefinition.md) +- [CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)](../connectors/crowdstrikereplicatorv2.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cyeraassets-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cyeraassets-cl.md new file mode 100644 index 00000000000..6e34b2ff0b2 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cyeraassets-cl.md @@ -0,0 +1,30 @@ +# CyeraAssets_CL + +**Table:** `CyeraAssets_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CyeraDSPM](../solutions/cyeradspm.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cyera DSPM Microsoft Sentinel Data Connector](../connectors/cyeradspmccf.md) +- [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](../connectors/cyerafunctionsconnector.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cyeraassets-ms-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cyeraassets-ms-cl.md new file mode 100644 index 00000000000..3fcb7434859 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cyeraassets-ms-cl.md @@ -0,0 +1,30 @@ +# CyeraAssets_MS_CL + +**Table:** `CyeraAssets_MS_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CyeraDSPM](../solutions/cyeradspm.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cyera DSPM Microsoft Sentinel Data Connector](../connectors/cyeradspmccf.md) +- [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](../connectors/cyerafunctionsconnector.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cyeraclassifications-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cyeraclassifications-cl.md new file mode 100644 index 00000000000..4b284d6e955 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cyeraclassifications-cl.md @@ -0,0 +1,30 @@ +# CyeraClassifications_CL + +**Table:** `CyeraClassifications_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CyeraDSPM](../solutions/cyeradspm.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cyera DSPM Microsoft Sentinel Data Connector](../connectors/cyeradspmccf.md) +- [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](../connectors/cyerafunctionsconnector.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cyeraidentities-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cyeraidentities-cl.md new file mode 100644 index 00000000000..e4e29f30d9c --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cyeraidentities-cl.md @@ -0,0 +1,30 @@ +# CyeraIdentities_CL + +**Table:** `CyeraIdentities_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CyeraDSPM](../solutions/cyeradspm.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cyera DSPM Microsoft Sentinel Data Connector](../connectors/cyeradspmccf.md) +- [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](../connectors/cyerafunctionsconnector.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/cyeraissues-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/cyeraissues-cl.md new file mode 100644 index 00000000000..9b27ef53e68 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/cyeraissues-cl.md @@ -0,0 +1,30 @@ +# CyeraIssues_CL + +**Table:** `CyeraIssues_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. + +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CyeraDSPM](../solutions/cyeradspm.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Cyera DSPM Microsoft Sentinel Data Connector](../connectors/cyeradspmccf.md) +- [Cyera DSPM Azure Functions Microsoft Sentinel Data Connector](../connectors/cyerafunctionsconnector.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/event.md b/Tools/Solutions Analyzer/connector-docs/tables/event.md new file mode 100644 index 00000000000..8f7f7eae65a --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/event.md @@ -0,0 +1,35 @@ +# Event + +**Table:** `Event` + +This table is ingested by **3 solution(s)** using **5 connector(s)**. 
+ +⚠️ **Note:** This table name is unique to specific connectors. + +--- + +## Solutions (3) + +This table is used by the following solutions: + +- [ALC-WebCTRL](../solutions/alc-webctrl.md) +- [Microsoft Exchange Security - Exchange On-Premises](../solutions/microsoft-exchange-security---exchange-on-premises.md) +- [MimecastTIRegional](../solutions/mimecasttiregional.md) + +## Connectors (5) + +This table is ingested by the following connectors: + +- [Automated Logic WebCTRL ](../connectors/automatedlogicwebctrl.md) +- [[Deprecated] Microsoft Exchange Logs and Events](../connectors/esi-exchangeadminauditlogevents.md) +- [Microsoft Exchange Admin Audit Logs by Event Logs](../connectors/esi-opt1exchangeadminauditlogsbyeventlogs.md) +- [Microsoft Exchange Logs and Events](../connectors/esi-opt2exchangeserverseventlogs.md) +- [Mimecast Intelligence for Microsoft - Microsoft Sentinel](../connectors/mimecasttiregionalconnectorazurefunctions.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/exchangehttpproxy-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/exchangehttpproxy-cl.md new file mode 100644 index 00000000000..81abfe82943 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/exchangehttpproxy-cl.md @@ -0,0 +1,30 @@ +# ExchangeHttpProxy_CL + +**Table:** `ExchangeHttpProxy_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [Microsoft Exchange Security - Exchange On-Premises](../solutions/microsoft-exchange-security---exchange-on-premises.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [[Deprecated] Microsoft Exchange Logs and Events](../connectors/esi-exchangeadminauditlogevents.md) +- [Microsoft Exchange HTTP Proxy Logs](../connectors/esi-opt7exchangehttpproxylogs.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/infobloxinsight-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/infobloxinsight-cl.md new file mode 100644 index 00000000000..91a15e5d0cc --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/infobloxinsight-cl.md @@ -0,0 +1,30 @@ +# InfobloxInsight_CL + +**Table:** `InfobloxInsight_CL` + +This table is ingested by **2 solution(s)** using **1 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [Infoblox](../solutions/infoblox.md) +- [Infoblox SOC Insights](../solutions/infoblox-soc-insights.md) + +## Connectors (1) + +This table is ingested by the following connectors: + +- [Infoblox SOC Insight Data Connector via REST API](../connectors/infobloxsocinsightsdataconnector-api.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/meraki-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/meraki-cl.md new file mode 100644 index 00000000000..94f0c38dbce --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/meraki-cl.md @@ -0,0 +1,30 @@ +# meraki_CL + +**Table:** `meraki_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. + +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [CiscoMeraki](../solutions/ciscomeraki.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [[Deprecated] Cisco Meraki](../connectors/ciscomeraki.md) +- [Cisco Meraki (using REST API)](../connectors/ciscomerakinativepoller.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/messagetrackinglog-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/messagetrackinglog-cl.md new file mode 100644 index 00000000000..c8aae0f8f30 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/messagetrackinglog-cl.md @@ -0,0 +1,30 @@ +# MessageTrackingLog_CL + +**Table:** `MessageTrackingLog_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. 
+ +⚠️ **Note:** This table name is unique to specific connectors. + +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [Microsoft Exchange Security - Exchange On-Premises](../solutions/microsoft-exchange-security---exchange-on-premises.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [[Deprecated] Microsoft Exchange Logs and Events](../connectors/esi-exchangeadminauditlogevents.md) +- [Microsoft Exchange Message Tracking Logs](../connectors/esi-opt6exchangemessagetrackinglogs.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/okta-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/okta-cl.md new file mode 100644 index 00000000000..daf3da4c0b8 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/okta-cl.md @@ -0,0 +1,30 @@ +# Okta_CL + +**Table:** `Okta_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [Okta Single Sign-On](../solutions/okta-single-sign-on.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Okta Single Sign-On](../connectors/oktasso.md) +- [Okta Single Sign-On](../connectors/oktassov2.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/onapsis-defend-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/onapsis-defend-cl.md new file mode 100644 index 00000000000..56161ae1f86 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/onapsis-defend-cl.md @@ -0,0 +1,30 @@ +# Onapsis_Defend_CL + +**Table:** `Onapsis_Defend_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. + +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [Onapsis Defend](../solutions/onapsis-defend.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Onapsis Defend Integration](../connectors/onapsis.md) +- [Onapsis Defend: Integrate Unmatched SAP Threat Detection & Intel with Microsoft Sentinel](../connectors/onapsis.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/onelogineventsv2-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/onelogineventsv2-cl.md new file mode 100644 index 00000000000..506e27dfac2 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/onelogineventsv2-cl.md @@ -0,0 +1,30 @@ +# OneLoginEventsV2_CL + +**Table:** `OneLoginEventsV2_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. 
+ +⚠️ **Note:** This table name is unique to specific connectors. + +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [OneLoginIAM](../solutions/oneloginiam.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [[DEPRECATED] OneLogin IAM Platform](../connectors/onelogin.md) +- [OneLogin IAM Platform (via Codeless Connector Framework)](../connectors/oneloginiamlogsccpdefinition.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/oneloginusersv2-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/oneloginusersv2-cl.md new file mode 100644 index 00000000000..0d3eca2b8b3 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/oneloginusersv2-cl.md @@ -0,0 +1,30 @@ +# OneLoginUsersV2_CL + +**Table:** `OneLoginUsersV2_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [OneLoginIAM](../solutions/oneloginiam.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [[DEPRECATED] OneLogin IAM Platform](../connectors/onelogin.md) +- [OneLogin IAM Platform (via Codeless Connector Framework)](../connectors/oneloginiamlogsccpdefinition.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/onepasswordeventlogs-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/onepasswordeventlogs-cl.md new file mode 100644 index 00000000000..65224ead5ea --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/onepasswordeventlogs-cl.md @@ -0,0 +1,30 @@ +# OnePasswordEventLogs_CL + +**Table:** `OnePasswordEventLogs_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [1Password](../solutions/1password.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [1Password](../connectors/1password.md) +- [1Password (Serverless)](../connectors/1passwordccpdefinition.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/paloaltocortexxdr-alerts-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/paloaltocortexxdr-alerts-cl.md new file mode 100644 index 00000000000..039c298afb0 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/paloaltocortexxdr-alerts-cl.md @@ -0,0 +1,30 @@ +# PaloAltoCortexXDR_Alerts_CL + +**Table:** `PaloAltoCortexXDR_Alerts_CL` + +This table is ingested by **2 solution(s)** using **1 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [Cortex XDR](../solutions/cortex-xdr.md) +- [Palo Alto Cortex XDR CCP](../solutions/palo-alto-cortex-xdr-ccp.md) + +## Connectors (1) + +This table is ingested by the following connectors: + +- [Palo Alto Cortex XDR](../connectors/cortexxdrdataconnector.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/paloaltocortexxdr-audit-agent-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/paloaltocortexxdr-audit-agent-cl.md new file mode 100644 index 00000000000..affbdd42270 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/paloaltocortexxdr-audit-agent-cl.md @@ -0,0 +1,30 @@ +# PaloAltoCortexXDR_Audit_Agent_CL + +**Table:** `PaloAltoCortexXDR_Audit_Agent_CL` + +This table is ingested by **2 solution(s)** using **1 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [Cortex XDR](../solutions/cortex-xdr.md) +- [Palo Alto Cortex XDR CCP](../solutions/palo-alto-cortex-xdr-ccp.md) + +## Connectors (1) + +This table is ingested by the following connectors: + +- [Palo Alto Cortex XDR](../connectors/cortexxdrdataconnector.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/paloaltocortexxdr-audit-management-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/paloaltocortexxdr-audit-management-cl.md new file mode 100644 index 00000000000..506293cb9aa --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/paloaltocortexxdr-audit-management-cl.md @@ -0,0 +1,30 @@ +# PaloAltoCortexXDR_Audit_Management_CL + +**Table:** `PaloAltoCortexXDR_Audit_Management_CL` + +This table is ingested by **2 solution(s)** using **1 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [Cortex XDR](../solutions/cortex-xdr.md) +- [Palo Alto Cortex XDR CCP](../solutions/palo-alto-cortex-xdr-ccp.md) + +## Connectors (1) + +This table is ingested by the following connectors: + +- [Palo Alto Cortex XDR](../connectors/cortexxdrdataconnector.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/paloaltocortexxdr-endpoints-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/paloaltocortexxdr-endpoints-cl.md new file mode 100644 index 00000000000..0c821cde19d --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/paloaltocortexxdr-endpoints-cl.md @@ -0,0 +1,30 @@ +# PaloAltoCortexXDR_Endpoints_CL + +**Table:** `PaloAltoCortexXDR_Endpoints_CL` + +This table is ingested by **2 solution(s)** using **1 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [Cortex XDR](../solutions/cortex-xdr.md) +- [Palo Alto Cortex XDR CCP](../solutions/palo-alto-cortex-xdr-ccp.md) + +## Connectors (1) + +This table is ingested by the following connectors: + +- [Palo Alto Cortex XDR](../connectors/cortexxdrdataconnector.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/paloaltocortexxdr-incidents-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/paloaltocortexxdr-incidents-cl.md new file mode 100644 index 00000000000..49f4f4ae21d --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/paloaltocortexxdr-incidents-cl.md @@ -0,0 +1,30 @@ +# PaloAltoCortexXDR_Incidents_CL + +**Table:** `PaloAltoCortexXDR_Incidents_CL` + +This table is ingested by **2 solution(s)** using **1 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [Cortex XDR](../solutions/cortex-xdr.md) +- [Palo Alto Cortex XDR CCP](../solutions/palo-alto-cortex-xdr-ccp.md) + +## Connectors (1) + +This table is ingested by the following connectors: + +- [Palo Alto Cortex XDR](../connectors/cortexxdrdataconnector.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/prismacloudcompute-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/prismacloudcompute-cl.md new file mode 100644 index 00000000000..e628158204e --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/prismacloudcompute-cl.md @@ -0,0 +1,30 @@ +# PrismaCloudCompute_CL + +**Table:** `PrismaCloudCompute_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [Palo Alto Prisma Cloud CWPP](../solutions/palo-alto-prisma-cloud-cwpp.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Palo Alto Prisma Cloud CWPP (using REST API)](../connectors/paloaltoprismacloudcwpp.md) +- [Palo Alto Prisma Cloud CWPP (using REST API)](../connectors/prismacloudcomputenativepoller.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/proofpointpodmessage-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/proofpointpodmessage-cl.md new file mode 100644 index 00000000000..755d33e2768 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/proofpointpodmessage-cl.md @@ -0,0 +1,30 @@ +# ProofpointPODMessage_CL + +**Table:** `ProofpointPODMessage_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [Proofpoint On demand(POD) Email Security](../solutions/proofpoint-on-demand(pod)-email-security.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Proofpoint On Demand Email Security (via Codeless Connector Platform)](../connectors/proofpointccpdefinition.md) +- [[Deprecated] Proofpoint On Demand Email Security](../connectors/proofpointpod.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/salesforceservicecloudv2-cl.md b/Tools/Solutions Analyzer/connector-docs/tables/salesforceservicecloudv2-cl.md new file mode 100644 index 00000000000..ea0bf6fe9aa --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/salesforceservicecloudv2-cl.md @@ -0,0 +1,30 @@ +# SalesforceServiceCloudV2_CL + +**Table:** `SalesforceServiceCloudV2_CL` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [Salesforce Service Cloud](../solutions/salesforce-service-cloud.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [[DEPRECATED] Salesforce Service Cloud](../connectors/salesforceservicecloud.md) +- [Salesforce Service Cloud (via Codeless Connector Framework)](../connectors/salesforceservicecloudccpdefinition.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/securityalert.md b/Tools/Solutions Analyzer/connector-docs/tables/securityalert.md new file mode 100644 index 00000000000..aef23182e4c --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/securityalert.md @@ -0,0 +1,31 @@ +# SecurityAlert + +**Table:** `SecurityAlert` + +This table is ingested by **2 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [Microsoft Defender for Cloud](../solutions/microsoft-defender-for-cloud.md) +- [Microsoft Defender for Office 365](../solutions/microsoft-defender-for-office-365.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Tenant-based Microsoft Defender for Cloud](../connectors/microsoftdefenderforcloudtenantbased.md) +- [Microsoft Defender for Office 365 (Preview)](../connectors/officeatp.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/securityevent.md b/Tools/Solutions Analyzer/connector-docs/tables/securityevent.md new file mode 100644 index 00000000000..2b84313b502 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/securityevent.md @@ -0,0 +1,34 @@ +# SecurityEvent + +**Table:** `SecurityEvent` + +This table is ingested by **3 solution(s)** using **4 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (3) + +This table is used by the following solutions: + +- [Cyborg Security HUNTER](../solutions/cyborg-security-hunter.md) +- [Microsoft Exchange Security - Exchange On-Premises](../solutions/microsoft-exchange-security---exchange-on-premises.md) +- [Semperis Directory Services Protector](../solutions/semperis-directory-services-protector.md) + +## Connectors (4) + +This table is ingested by the following connectors: + +- [Cyborg Security HUNTER Hunt Packages](../connectors/cyborgsecurity-hunter.md) +- [[Deprecated] Microsoft Exchange Logs and Events](../connectors/esi-exchangeadminauditlogevents.md) +- [ Microsoft Active-Directory Domain Controllers Security Event Logs](../connectors/esi-opt34domaincontrollerssecurityeventlogs.md) +- [Semperis Directory Services Protector](../connectors/semperisdsp.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/securityincident.md b/Tools/Solutions Analyzer/connector-docs/tables/securityincident.md new file mode 100644 index 00000000000..92a165a4afe --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/securityincident.md @@ -0,0 +1,31 @@ +# SecurityIncident + +**Table:** `SecurityIncident` + +This table is ingested by **2 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [Microsoft Defender XDR](../solutions/microsoft-defender-xdr.md) +- [SIGNL4](../solutions/signl4.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [Derdack SIGNL4](../connectors/derdacksignl4.md) +- [Microsoft Defender XDR](../connectors/microsoftthreatprotection.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/syslog.md b/Tools/Solutions Analyzer/connector-docs/tables/syslog.md new file mode 100644 index 00000000000..679e0653e3f --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/syslog.md @@ -0,0 +1,94 @@ +# Syslog + +**Table:** `Syslog` + +This table is ingested by **33 solution(s)** using **34 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. + +--- + +## Solutions (33) + +This table is used by the following solutions: + +- [Barracuda CloudGen Firewall](../solutions/barracuda-cloudgen-firewall.md) +- [CTERA](../solutions/ctera.md) +- [Cisco ACI](../solutions/cisco-aci.md) +- [Cisco ISE](../solutions/cisco-ise.md) +- [Cisco SD-WAN](../solutions/cisco-sd-wan.md) +- [Cisco Secure Cloud Analytics](../solutions/cisco-secure-cloud-analytics.md) +- [Cisco UCS](../solutions/cisco-ucs.md) +- [CiscoWSA](../solutions/ciscowsa.md) +- [Citrix ADC](../solutions/citrix-adc.md) +- [Digital Guardian Data Loss Prevention](../solutions/digital-guardian-data-loss-prevention.md) +- [ESETPROTECT](../solutions/esetprotect.md) +- [Exabeam Advanced Analytics](../solutions/exabeam-advanced-analytics.md) +- [Forescout (Legacy)](../solutions/forescout-(legacy).md) +- [GitLab](../solutions/gitlab.md) +- [ISC Bind](../solutions/isc-bind.md) +- [Infoblox NIOS](../solutions/infoblox-nios.md) +- [Ivanti Unified Endpoint 
Management](../solutions/ivanti-unified-endpoint-management.md) +- [Juniper SRX](../solutions/juniper-srx.md) +- [McAfee Network Security Platform](../solutions/mcafee-network-security-platform.md) +- [McAfee ePolicy Orchestrator](../solutions/mcafee-epolicy-orchestrator.md) +- [Microsoft Sysmon For Linux](../solutions/microsoft-sysmon-for-linux.md) +- [Nasuni](../solutions/nasuni.md) +- [OpenVPN](../solutions/openvpn.md) +- [OracleDatabaseAudit](../solutions/oracledatabaseaudit.md) +- [Pulse Connect Secure](../solutions/pulse-connect-secure.md) +- [RSA SecurID](../solutions/rsa-securid.md) +- [Sophos XG Firewall](../solutions/sophos-xg-firewall.md) +- [Symantec Endpoint Protection](../solutions/symantec-endpoint-protection.md) +- [Symantec VIP](../solutions/symantec-vip.md) +- [SymantecProxySG](../solutions/symantecproxysg.md) +- [Syslog](../solutions/syslog.md) +- [VMWareESXi](../solutions/vmwareesxi.md) +- [Watchguard Firebox](../solutions/watchguard-firebox.md) + +## Connectors (34) + +This table is ingested by the following connectors: + +- [[Deprecated] Barracuda CloudGen Firewall](../connectors/barracudacloudfirewall.md) +- [CTERA Syslog](../connectors/ctera.md) +- [[Deprecated] Cisco Application Centric Infrastructure](../connectors/ciscoaci.md) +- [[Deprecated] Cisco Identity Services Engine](../connectors/ciscoise.md) +- [Cisco Software Defined WAN](../connectors/ciscosdwan.md) +- [[Deprecated] Cisco UCS](../connectors/ciscoucs.md) +- [[Deprecated] Cisco Web Security Appliance](../connectors/ciscowsa.md) +- [[Deprecated] Citrix ADC (former NetScaler)](../connectors/citrixadc.md) +- [[Deprecated] Digital Guardian Data Loss Prevention](../connectors/digitalguardiandlp.md) +- [[Deprecated] ESET PROTECT](../connectors/esetprotect.md) +- [[Deprecated] Exabeam Advanced Analytics](../connectors/exabeam.md) +- [Forescout](../connectors/forescout.md) +- [[Deprecated] GitLab](../connectors/gitlab.md) +- [[Deprecated] ISC Bind](../connectors/iscbind.md) +- 
[[Deprecated] Infoblox NIOS](../connectors/infobloxnios.md) +- [[Deprecated] Ivanti Unified Endpoint Management](../connectors/ivantiuem.md) +- [[Deprecated] Juniper SRX](../connectors/junipersrx.md) +- [[Deprecated] McAfee Network Security Platform](../connectors/mcafeensp.md) +- [[Deprecated] McAfee ePolicy Orchestrator (ePO)](../connectors/mcafeeepo.md) +- [[Deprecated] Microsoft Sysmon For Linux](../connectors/microsoftsysmonforlinux.md) +- [[Deprecated] Nasuni Edge Appliance](../connectors/nasuniedgeappliance.md) +- [[Deprecated] OpenVPN Server](../connectors/openvpn.md) +- [[Deprecated] Oracle Database Audit](../connectors/oracledatabaseaudit.md) +- [[Deprecated] Pulse Connect Secure](../connectors/pulseconnectsecure.md) +- [[Deprecated] RSA® SecurID (Authentication Manager)](../connectors/rsasecuridam.md) +- [[Deprecated] Sophos XG Firewall](../connectors/sophosxgfirewall.md) +- [[Deprecated] Cisco Secure Cloud Analytics](../connectors/stealthwatch.md) +- [[Deprecated] Symantec Endpoint Protection](../connectors/symantecendpointprotection.md) +- [[Deprecated] Symantec ProxySG](../connectors/symantecproxysg.md) +- [[Deprecated] Symantec VIP](../connectors/symantecvip.md) +- [Syslog via Legacy Agent](../connectors/syslog.md) +- [Syslog via AMA](../connectors/syslogama.md) +- [[Deprecated] VMware ESXi](../connectors/vmwareesxi.md) +- [[Deprecated] WatchGuard Firebox](../connectors/watchguardfirebox.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/threatintelindicators.md b/Tools/Solutions Analyzer/connector-docs/tables/threatintelindicators.md new file mode 100644 index 00000000000..7620212c4c4 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/threatintelindicators.md @@ -0,0 +1,35 @@ +# ThreatIntelIndicators + +**Table:** `ThreatIntelIndicators` + +This table is 
ingested by **2 solution(s)** using **6 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. + +--- + +## Solutions (2) + +This table is used by the following solutions: + +- [Lumen Defender Threat Feed](../solutions/lumen-defender-threat-feed.md) +- [Threat Intelligence (NEW)](../solutions/threat-intelligence-(new).md) + +## Connectors (6) + +This table is ingested by the following connectors: + +- [Lumen Defender Threat Feed Data Connector](../connectors/lumenthreatfeedconnector.md) +- [Microsoft Defender Threat Intelligence](../connectors/microsoftdefenderthreatintelligence.md) +- [Premium Microsoft Defender Threat Intelligence](../connectors/premiummicrosoftdefenderforthreatintelligence.md) +- [Threat Intelligence Platforms](../connectors/threatintelligence.md) +- [Threat intelligence - TAXII](../connectors/threatintelligencetaxii.md) +- [Threat Intelligence Upload API (Preview)](../connectors/threatintelligenceuploadindicatorsapi.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/threatintelligenceindicator.md b/Tools/Solutions Analyzer/connector-docs/tables/threatintelligenceindicator.md new file mode 100644 index 00000000000..fd1960b4f28 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/threatintelligenceindicator.md @@ -0,0 +1,51 @@ +# ThreatIntelligenceIndicator + +**Table:** `ThreatIntelligenceIndicator` + +This table is ingested by **10 solution(s)** using **14 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (10) + +This table is used by the following solutions: + +- [CofenseIntelligence](../solutions/cofenseintelligence.md) +- [CofenseTriage](../solutions/cofensetriage.md) +- [CognyteLuminar](../solutions/cognyteluminar.md) +- [CrowdStrike Falcon Endpoint Protection](../solutions/crowdstrike-falcon-endpoint-protection.md) +- [Datalake2Sentinel](../solutions/datalake2sentinel.md) +- [GreyNoiseThreatIntelligence](../solutions/greynoisethreatintelligence.md) +- [MISP2Sentinel](../solutions/misp2sentinel.md) +- [MimecastTIRegional](../solutions/mimecasttiregional.md) +- [Threat Intelligence](../solutions/threat-intelligence.md) +- [VMRay](../solutions/vmray.md) + +## Connectors (14) + +This table is ingested by the following connectors: + +- [Cofense Intelligence Threat Indicators Ingestion](../connectors/cofenseintelligence.md) +- [Cofense Triage Threat Indicators Ingestion](../connectors/cofensetriage.md) +- [Luminar IOCs and Leaked Credentials](../connectors/cognyteluminar.md) +- [CrowdStrike Falcon Adversary Intelligence ](../connectors/crowdstrikefalconadversaryintelligence.md) +- [Datalake2Sentinel](../connectors/datalake2sentinelconnector.md) +- [GreyNoise Threat Intelligence](../connectors/greynoise2sentinelapi.md) +- [MISP2Sentinel](../connectors/misp2sentinelconnector.md) +- [Microsoft Defender Threat Intelligence](../connectors/microsoftdefenderthreatintelligence.md) +- [Mimecast Intelligence for Microsoft - Microsoft Sentinel](../connectors/mimecasttiregionalconnectorazurefunctions.md) +- [Premium Microsoft Defender Threat Intelligence](../connectors/premiummicrosoftdefenderforthreatintelligence.md) +- [Threat Intelligence Platforms](../connectors/threatintelligence.md) +- [Threat intelligence - TAXII](../connectors/threatintelligencetaxii.md) +- [Threat Intelligence Upload API (Preview)](../connectors/threatintelligenceuploadindicatorsapi.md) +- [VMRayThreatIntelligence](../connectors/vmray.md) + +--- + +**Browse:** + +- [← Back to 
Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/threatintelobjects.md b/Tools/Solutions Analyzer/connector-docs/tables/threatintelobjects.md new file mode 100644 index 00000000000..3d5f475fd80 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/threatintelobjects.md @@ -0,0 +1,33 @@ +# ThreatIntelObjects + +**Table:** `ThreatIntelObjects` + +This table is ingested by **1 solution(s)** using **5 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. + +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [Threat Intelligence (NEW)](../solutions/threat-intelligence-(new).md) + +## Connectors (5) + +This table is ingested by the following connectors: + +- [Microsoft Defender Threat Intelligence](../connectors/microsoftdefenderthreatintelligence.md) +- [Premium Microsoft Defender Threat Intelligence](../connectors/premiummicrosoftdefenderforthreatintelligence.md) +- [Threat Intelligence Platforms](../connectors/threatintelligence.md) +- [Threat intelligence - TAXII](../connectors/threatintelligencetaxii.md) +- [Threat Intelligence Upload API (Preview)](../connectors/threatintelligenceuploadindicatorsapi.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/connector-docs/tables/w3ciislog.md b/Tools/Solutions Analyzer/connector-docs/tables/w3ciislog.md new file mode 100644 index 00000000000..0baed9cd76b --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/tables/w3ciislog.md @@ -0,0 +1,30 @@ +# W3CIISLog + +**Table:** `W3CIISLog` + +This table is ingested by **1 solution(s)** using **2 connector(s)**. + +⚠️ **Note:** This table name is unique to specific connectors. 
+ +--- + +## Solutions (1) + +This table is used by the following solutions: + +- [Microsoft Exchange Security - Exchange On-Premises](../solutions/microsoft-exchange-security---exchange-on-premises.md) + +## Connectors (2) + +This table is ingested by the following connectors: + +- [[Deprecated] Microsoft Exchange Logs and Events](../connectors/esi-exchangeadminauditlogevents.md) +- [IIS Logs of Microsoft Exchange Servers](../connectors/esi-opt5exchangeiislogs.md) + +--- + +**Browse:** + +- [← Back to Tables Index](../tables-index.md) +- [Solutions Index](../solutions-index.md) +- [Connectors Index](../connectors-index.md) diff --git a/Tools/Solutions Analyzer/generate_connector_docs.py b/Tools/Solutions Analyzer/generate_connector_docs.py index d23b4456a3f..53e0787d83e 100644 --- a/Tools/Solutions Analyzer/generate_connector_docs.py +++ b/Tools/Solutions Analyzer/generate_connector_docs.py @@ -8,9 +8,11 @@ import csv from collections import defaultdict from pathlib import Path -from typing import Dict, List, Set +from typing import Any, Dict, List, Set import argparse from urllib.parse import quote +import json +import re def sanitize_anchor(text: str) -> str: @@ -18,6 +20,622 @@ def sanitize_anchor(text: str) -> str: return text.lower().replace(" ", "-").replace("/", "-").replace("_", "-") +def format_instruction_steps(instruction_steps: str) -> str: + """ + Parse and format instruction steps from CSV field. + + The instruction_steps field contains escaped JSON representing the instructionSteps array. + This function parses the JSON and formats it as markdown. + """ + if not instruction_steps: + return "" + + try: + # Parse the JSON string + steps_data = json.loads(instruction_steps) + except (json.JSONDecodeError, TypeError): + # Fallback for old format (already formatted string with
tags) + formatted = instruction_steps.replace('
', '\n') + formatted = re.sub(r'\n{3,}', '\n\n', formatted) + formatted = re.sub(r'/\*\s*Lines\s+\d+-\d+\s+omitted\s*\*/', '', formatted) + return formatted.strip() + + # Format the instruction steps recursively + return _format_instruction_steps_recursive(steps_data, indent_level=0) + + +def _format_data_connectors_grid(parameters: Dict[str, Any], indent: str = "") -> str: + """Format DataConnectorsGrid instruction type with clear explanation.""" + mapping = parameters.get("mapping", []) + menu_items = parameters.get("menuItems", []) + + lines = [ + f"{indent}**Connector Management Interface**\n\n", + f"{indent}This section is an interactive interface in the Microsoft Sentinel portal that allows you to manage your data collectors.\n\n" + ] + + if mapping: + lines.append(f"{indent}📊 **View Existing Collectors**: A management table displays all currently configured data collectors with the following information:\n") + for col in mapping: + col_name = col.get("columnName", "") + if col_name: + lines.append(f"{indent}- **{col_name}**\n") + lines.append("\n") + + lines.append(f"{indent}➕ **Add New Collector**: Click the \"Add new collector\" button to configure a new data collector (see configuration form below).\n\n") + + if "DeleteConnector" in menu_items or "EditConnector" in menu_items: + lines.append(f"{indent}🔧 **Manage Collectors**: Use the actions menu to delete or modify existing collectors.\n\n") + + lines.append(f"{indent}> 💡 **Portal-Only Feature**: This configuration interface is only available when viewing the connector in the Microsoft Sentinel portal. 
You cannot configure data collectors through this static documentation.\n\n") + + return "".join(lines) + + +def _format_context_pane(parameters: Dict[str, Any], indent: str = "") -> str: + """Format ContextPane instruction type with detailed form field explanation.""" + title = parameters.get("title", "Configuration Form") + subtitle = parameters.get("subtitle", "") + label = parameters.get("label", "Add new collector") + instruction_steps = parameters.get("instructionSteps", []) + + lines = [ + f"{indent}**{title}**\n\n", + ] + + if subtitle: + lines.append(f"{indent}*{subtitle}*\n\n") + + lines.append(f"{indent}When you click the \"{label}\" button in the portal, a configuration form will open. You'll need to provide:\n\n") + + # Process instruction steps to show what fields are required + if instruction_steps: + for step in instruction_steps: + step_title = step.get("title", "") + step_instructions = step.get("instructions", []) + + if step_title: + lines.append(f"{indent}*{step_title}*\n\n") + + for instruction in step_instructions: + if not isinstance(instruction, dict): + continue + + instr_type = instruction.get("type", "") + params = instruction.get("parameters", {}) + + if instr_type == "Textbox": + label_text = params.get("label", "") + placeholder = params.get("placeholder", "") + required = params.get("validations", {}).get("required", False) + req_marker = " (required)" if required else " (optional)" + + if label_text: + lines.append(f"{indent}- **{label_text}**{req_marker}") + if placeholder: + lines.append(f": {placeholder}") + lines.append("\n") + + elif instr_type == "Dropdown": + label_text = params.get("label", "") + options = params.get("options", []) + required = params.get("required", False) + req_marker = " (required)" if required else " (optional)" + + if label_text: + lines.append(f"{indent}- **{label_text}**{req_marker}: Select from available options\n") + if options: + for opt in options[:5]: # Show first 5 options + opt_text = 
opt.get('text', opt.get('key', '')) + if opt_text: + lines.append(f"{indent} - {opt_text}\n") + if len(options) > 5: + lines.append(f"{indent} - ... and {len(options) - 5} more options\n") + + elif instr_type == "Markdown": + content = params.get("content", "") + if content: + lines.append(f"{indent}{content}\n\n") + + lines.append("\n") + + lines.append(f"{indent}> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal.\n\n") + + return "".join(lines) + + +def _format_gcp_grid(parameters: Dict[str, Any], indent: str = "") -> str: + """Format GCPGrid instruction type.""" + lines = [ + f"{indent}**GCP Collector Management**\n\n", + f"{indent}📊 **View GCP Collectors**: A management interface displays your configured Google Cloud Platform data collectors.\n\n", + f"{indent}➕ **Add New Collector**: Click \"Add new collector\" to configure a new GCP data connection.\n\n", + f"{indent}> 💡 **Portal-Only Feature**: This configuration interface is only available in the Microsoft Sentinel portal.\n\n" + ] + return "".join(lines) + + +def _format_gcp_context_pane(parameters: Dict[str, Any], indent: str = "") -> str: + """Format GCPContextPane instruction type.""" + lines = [ + f"{indent}**GCP Connection Configuration**\n\n", + f"{indent}When you click \"Add new collector\" in the portal, you'll be prompted to provide:\n", + f"{indent}- **Project ID**: Your Google Cloud Platform project ID\n", + f"{indent}- **Service Account**: GCP service account credentials with appropriate permissions\n", + f"{indent}- **Subscription**: The Pub/Sub subscription to monitor for log data\n\n", + f"{indent}> 💡 **Portal-Only Feature**: This configuration form is only available in the Microsoft Sentinel portal.\n\n" + ] + return "".join(lines) + + +def _format_data_type_selector(instr_type: str, parameters: Dict[str, Any], indent: str = "") -> str: + """Format data type selector instruction types (AADDataTypes, MCasDataTypes, OfficeDataTypes).""" + 
data_types = parameters.get("dataTypes", []) + + type_names = { + "AADDataTypes": "Microsoft Entra ID", + "MCasDataTypes": "Microsoft Defender for Cloud Apps", + "OfficeDataTypes": "Microsoft 365" + } + + type_name = type_names.get(instr_type, "Data") + + lines = [ + f"{indent}**Select {type_name} Data Types**\n\n", + f"{indent}In the Microsoft Sentinel portal, select which data types to enable:\n\n" + ] + + if data_types: + for dt in data_types: + if isinstance(dt, dict): + dt_name = dt.get("name", "") + dt_title = dt.get("title", dt_name) + if dt_title: + lines.append(f"{indent}- ☐ **{dt_title}**\n") + + # Add info box if available + info_html = dt.get("infoBoxHtmlTemplate", "") + if info_html and len(info_html) < 200: + # Strip HTML tags for simple display + info_text = re.sub(r'<[^>]+>', '', info_html).strip() + if info_text: + lines.append(f"{indent} *{info_text}*\n") + lines.append("\n") + + lines.append(f"{indent}Each data type may have specific licensing requirements. Review the information provided for each type in the portal before enabling.\n\n") + lines.append(f"{indent}> 💡 **Portal-Only Feature**: Data type selection is only available in the Microsoft Sentinel portal.\n\n") + + return "".join(lines) + + +def _format_instruction_steps_recursive(instruction_steps: Any, indent_level: int = 0) -> str: + """ + Recursively format instructionSteps array to markdown. 
+ + Args: + instruction_steps: List of instruction step objects + indent_level: Current nesting level for indentation (0 = top level) + + Returns: + Formatted markdown string + """ + if not isinstance(instruction_steps, list): + return "" + + lines = [] + step_num = 0 + indent = " " * indent_level # 2 spaces per level + + for step in instruction_steps: + if not isinstance(step, dict): + continue + + title = step.get("title", "") or "" + description = step.get("description", "") or "" + title = title.strip() if isinstance(title, str) else "" + description = description.strip() if isinstance(description, str) else "" + instructions = step.get("instructions", []) + inner_steps = step.get("innerSteps", []) + + # Skip empty steps unless they have instructions or innerSteps + if not title and not description and not instructions and not inner_steps: + continue + + # Check if title already starts with a number (to avoid duplicate numbering) + title_has_number = bool(title and re.match(r'^\d+\.', title)) + + # Only increment step number if there's substantial content and title doesn't already have a number + if (title or (description and not description.startswith(">"))) and not title_has_number: + step_num += 1 + + # Format the step with indentation + if title and description: + if indent_level == 0: + if title_has_number: + lines.append(f"**{title}**\n\n{description}\n") + else: + lines.append(f"**{step_num}. {title}**\n\n{description}\n") + else: + lines.append(f"{indent}**{title}**\n\n{indent}{description}\n") + elif title: + if indent_level == 0: + if title_has_number: + lines.append(f"**{title}**\n") + else: + lines.append(f"**{step_num}. 
{title}**\n") + else: + lines.append(f"{indent}**{title}**\n") + elif description: + # For notes without titles (usually start with >) + lines.append(f"{indent}{description}\n") + + # Process instructions array if present (UI elements like CopyableLabel) + if isinstance(instructions, list): + for instruction in instructions: + if not isinstance(instruction, dict): + continue + + instr_type = instruction.get("type", "") + parameters = instruction.get("parameters", {}) + + # Handle different instruction types + if instr_type == "CopyableLabel" and isinstance(parameters, dict): + label = parameters.get("label", "") + # Check for both fillWith (array) and value (string) patterns + fill_with = parameters.get("fillWith", []) + value = parameters.get("value", "") + if label: + if value: + # Use direct value if present + fill_value = value + lines.append(f"{indent}- **{label}**: `{fill_value}`\n") + elif fill_with: + # Use first element from fillWith array + fill_value = fill_with[0] if isinstance(fill_with, list) and fill_with else "" + lines.append(f"{indent}- **{label}**: `{fill_value}`\n") + lines.append(f"{indent} > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.*\n") + else: + lines.append(f"{indent}- **{label}**\n") + + elif instr_type == "InfoMessage" and isinstance(parameters, dict): + # InfoMessage: inline information message + # Parameters: text, visible, inline + text = parameters.get("text", "") + inline = parameters.get("inline", True) + visible = parameters.get("visible", True) + if text and visible: + lines.append(f"\n{indent}ℹ️ {text}\n") + + elif instr_type == "Markdown" and isinstance(parameters, dict): + # Markdown: displays formatted markdown text + # Parameters: content + content = parameters.get("content", "") + if content: + lines.append(f"{indent}{content}\n") + + elif instr_type == "MarkdownControlEnvBased" and isinstance(parameters, dict): + # Environment-based markdown (prod/gov 
scripts) + prod_script = parameters.get("prodScript", "") + gov_script = parameters.get("govScript", "") + if prod_script: + lines.append(f"{indent}{prod_script}\n") + if gov_script and gov_script != prod_script: + lines.append(f"{indent}\n**Government Cloud:**\n{indent}{gov_script}\n") + + elif instr_type == "Textbox" and isinstance(parameters, dict): + # Textbox: input field for text, password, number, or email + # Parameters: label, placeholder, type, name, validations + label = parameters.get("label", "") + placeholder = parameters.get("placeholder", "") + text_type = parameters.get("type", "text") + if label: + if text_type == "password": + lines.append(f"{indent}- **{label}**: (password field)\n") + elif placeholder: + lines.append(f"{indent}- **{label}**: {placeholder}\n") + else: + lines.append(f"{indent}- **{label}**\n") + + elif instr_type == "OAuthForm" and isinstance(parameters, dict): + # OAuthForm: OAuth connection form + # Parameters: clientIdLabel, clientSecretLabel, connectButtonLabel, disconnectButtonLabel + client_id_label = parameters.get("clientIdLabel", "Client ID") + client_secret_label = parameters.get("clientSecretLabel", "Client Secret") + connect_label = parameters.get("connectButtonLabel", "Connect") + lines.append(f"{indent}- **OAuth Configuration**:\n") + lines.append(f"{indent} - {client_id_label}\n") + lines.append(f"{indent} - {client_secret_label}\n") + lines.append(f"{indent} - Click '{connect_label}' to authenticate\n") + + elif instr_type == "Dropdown" and isinstance(parameters, dict): + # Dropdown: dropdown selection list + # Parameters: label, name, options, placeholder, isMultiSelect, required, defaultAllSelected + label = parameters.get("label", "") + options = parameters.get("options", []) + is_multi = parameters.get("isMultiSelect", False) + if label: + select_type = "multi-select" if is_multi else "select" + lines.append(f"{indent}- **{label}** ({select_type})\n") + if options and isinstance(options, list): + for opt in 
options[:5]: # Show first 5 options + if isinstance(opt, dict): + opt_text = opt.get("text", opt.get("key", "")) + if opt_text: + lines.append(f"{indent} - {opt_text}\n") + if len(options) > 5: + lines.append(f"{indent} - ... and {len(options) - 5} more options\n") + + elif instr_type == "InstallAgent" and isinstance(parameters, dict): + # InstallAgent: displays link to Azure portal sections for installation + # Parameters: linkType, policyDefinitionGuid, assignMode, dataCollectionRuleType + link_type = parameters.get("linkType", "") + if link_type: + # Map technical linkType names to user-friendly descriptions + link_descriptions = { + "InstallAgentOnWindowsVirtualMachine": "Install agent on Windows Virtual Machine", + "InstallAgentOnWindowsNonAzure": "Install agent on Windows (Non-Azure)", + "InstallAgentOnLinuxVirtualMachine": "Install agent on Linux Virtual Machine", + "InstallAgentOnLinuxNonAzure": "Install agent on Linux (Non-Azure)", + "OpenSyslogSettings": "Open Syslog settings", + "OpenCustomLogsSettings": "Open custom logs settings", + "OpenWaf": "Configure Web Application Firewall", + "OpenAzureFirewall": "Configure Azure Firewall", + "OpenMicrosoftAzureMonitoring": "Open Azure Monitoring", + "OpenFrontDoors": "Configure Azure Front Door", + "OpenCdnProfile": "Configure CDN Profile", + "AutomaticDeploymentCEF": "Automatic CEF deployment", + "OpenAzureInformationProtection": "Configure Azure Information Protection", + "OpenAzureActivityLog": "Configure Azure Activity Log", + "OpenIotPricingModel": "Configure IoT pricing", + "OpenPolicyAssignment": "Configure policy assignment", + "OpenAllAssignmentsBlade": "View all assignments", + "OpenCreateDataCollectionRule": "Create data collection rule" + } + description = link_descriptions.get(link_type, f"Install/configure: {link_type}") + lines.append(f"{indent}- **{description}**\n") + + elif instr_type == "ConnectionToggleButton" and isinstance(parameters, dict): + # ConnectionToggleButton: toggle button to 
connect/disconnect + # Parameters: connectLabel, disconnectLabel, name, disabled, isPrimary + connect_label = parameters.get("connectLabel", "Connect") + disconnect_label = parameters.get("disconnectLabel", "Disconnect") + lines.append(f"{indent}- Click '{connect_label}' to establish connection\n") + + elif instr_type == "InstructionStepsGroup" and isinstance(parameters, dict): + # InstructionStepsGroup: collapsible group of instructions + # Parameters: title, description, instructionSteps, canCollapseAllSections, expanded + group_title = parameters.get("title", "") + group_description = parameters.get("description", "") + group_steps = parameters.get("instructionSteps", []) + can_collapse = parameters.get("canCollapseAllSections", False) + + if group_title: + collapse_indicator = " (expandable)" if can_collapse else "" + lines.append(f"{indent}**{group_title}{collapse_indicator}**\n\n") + if group_description: + lines.append(f"{indent}{group_description}\n\n") + if group_steps: + nested_content = _format_instruction_steps_recursive(group_steps, indent_level + 1) + if nested_content: + lines.append(nested_content + "\n") + + elif instr_type == "ConfigureLogSettings" and isinstance(parameters, dict): + link_type = parameters.get("linkType", "") + lines.append(f"{indent}- Configure log settings: {link_type}\n") + + elif instr_type == "MSG" and isinstance(parameters, dict): + # Microsoft Security Graph items + msg_description = parameters.get("description", "") + items = parameters.get("items", []) + if msg_description: + lines.append(f"{indent}{msg_description}\n") + if items: + for item in items: + if isinstance(item, dict): + label = item.get("label", "") + if label: + lines.append(f"{indent} - {label}\n") + + elif instr_type in ["SecurityEvents", "WindowsSecurityEvents", "WindowsForwardedEvents", + "WindowsFirewallAma", "SysLogAma", "CefAma", "CiscoAsaAma"]: + # Data connector configuration types + lines.append(f"{indent}- Configure {instr_type} data connector\n") 
+ + elif instr_type == "OmsDatasource" and isinstance(parameters, dict): + datasource = parameters.get("datasourceName", "") + if datasource: + lines.append(f"{indent}- Configure data source: {datasource}\n") + + elif instr_type == "OmsSolutions" and isinstance(parameters, dict): + solution = parameters.get("solutionName", "") + if solution: + lines.append(f"{indent}- Install solution: {solution}\n") + + elif instr_type == "SentinelResourceProvider" and isinstance(parameters, dict): + connector_kind = parameters.get("connectorKind", "") + title = parameters.get("title", connector_kind) + if title: + lines.append(f"{indent}- Connect {title}\n") + + elif instr_type == "DeployPushConnectorButton_test" and isinstance(parameters, dict): + label = parameters.get("label", "Deploy connector") + app_name = parameters.get("applicationDisplayName", "") + if label: + lines.append(f"{indent}- {label}\n") + if app_name: + lines.append(f"{indent} Application: {app_name}\n") + + # UI-centric instruction types + elif instr_type == "DataConnectorsGrid" and isinstance(parameters, dict): + # DataConnectorsGrid: displays a grid of data connectors + # Parameters: mapping, menuItems + lines.append(_format_data_connectors_grid(parameters, indent)) + + elif instr_type == "ContextPane" and isinstance(parameters, dict): + # ContextPane: displays a contextual information pane + # Parameters: title, subtitle, contextPaneType, instructionSteps, label, isPrimary + lines.append(_format_context_pane(parameters, indent)) + + elif instr_type == "GCPGrid": + # GCP-specific grid display + lines.append(_format_gcp_grid(parameters if isinstance(parameters, dict) else {}, indent)) + + elif instr_type == "GCPContextPane": + # GCP-specific context pane + lines.append(_format_gcp_context_pane(parameters if isinstance(parameters, dict) else {}, indent)) + + elif instr_type in ["AADDataTypes", "MCasDataTypes", "OfficeDataTypes"] and isinstance(parameters, dict): + # Data type selector for Microsoft services + 
lines.append(_format_data_type_selector(instr_type, parameters, indent)) + + # For any other types, show basic info if available + elif instr_type: + # For types we haven't explicitly handled, try to extract useful information + if isinstance(parameters, dict): + # Try to find useful text fields in order of preference + useful_text = None + for key in ['text', 'content', 'description', 'label', 'title', 'message']: + if key in parameters and isinstance(parameters[key], str) and parameters[key].strip(): + useful_text = parameters[key].strip() + break + + if useful_text: + # Found useful text, display it + lines.append(f"{indent}{useful_text}\n") + else: + # No useful text found, provide a generic note + lines.append(f"{indent}> 📋 **Additional Configuration Step**: This connector includes a configuration step of type `{instr_type}`. Please refer to the Microsoft Sentinel portal for detailed configuration options for this step.\n") + + # Recursively process innerSteps if present (nested sub-steps) + if isinstance(inner_steps, list) and inner_steps: + inner_content = _format_instruction_steps_recursive(inner_steps, indent_level + 1) + if inner_content: + lines.append(inner_content) + + lines.append("\n") + + return "".join(lines).strip() + + +def format_permissions(permissions_json: str) -> str: + """ + Parse and format permissions from JSON-encoded CSV field. + + Renders permissions based on the official Microsoft Sentinel data connector UI definitions: + https://learn.microsoft.com/en-us/azure/sentinel/data-connector-ui-definitions-reference#permissions + + Args: + permissions_json: JSON-encoded permissions object from CSV + + Returns: + Formatted markdown string with permissions + """ + if not permissions_json: + return "" + + try: + permissions = json.loads(permissions_json) + except json.JSONDecodeError: + # If it's not JSON, return as-is (backward compatibility) + return permissions_json.replace('
', '\n').strip() + + if not isinstance(permissions, dict): + return "" + + lines = [] + + # Resource Provider permissions + resource_providers = permissions.get("resourceProvider", []) + if isinstance(resource_providers, list) and resource_providers: + lines.append("**Resource Provider Permissions:**\n") + for rp in resource_providers: + if not isinstance(rp, dict): + continue + + provider = rp.get("provider", "") + provider_display = rp.get("providerDisplayName", "") + scope = rp.get("scope", "Workspace") + perms_text = rp.get("permissionsDisplayText", "") + required_perms = rp.get("requiredPermissions", {}) + + # Build permission description + display_name = provider_display or provider + if not display_name: + continue + + perm_parts = [] + if isinstance(required_perms, dict): + if required_perms.get("read"): + perm_parts.append("read") + if required_perms.get("write"): + perm_parts.append("write") + if required_perms.get("delete"): + perm_parts.append("delete") + if required_perms.get("action"): + perm_parts.append("action") + + # Use permissionsDisplayText if available, otherwise build from requiredPermissions + if perms_text: + lines.append(f"- **{display_name}** ({scope}): {perms_text}\n") + elif perm_parts: + perms_desc = " and ".join(perm_parts) + " permission" + ("s" if len(perm_parts) > 1 else "") + lines.append(f"- **{display_name}** ({scope}): {perms_desc} required.\n") + else: + lines.append(f"- **{display_name}** ({scope})\n") + + # Custom permissions + customs = permissions.get("customs", []) + if isinstance(customs, list) and customs: + if lines: + lines.append("\n") + lines.append("**Custom Permissions:**\n") + for custom in customs: + if not isinstance(custom, dict): + continue + name = custom.get("name", "") + description = custom.get("description", "") + + if name: + if description: + lines.append(f"- **{name}**: {description}\n") + else: + lines.append(f"- **{name}**\n") + + # Licenses + licenses = permissions.get("licenses", []) + if 
isinstance(licenses, list) and licenses: + if lines: + lines.append("\n") + lines.append("**Licenses:**\n") + # Map license codes to friendly names + license_names = { + "OfficeIRM": "Office Information Rights Management", + "OfficeATP": "Office Advanced Threat Protection", + "Office365": "Office 365", + "AadP1P2": "Azure AD Premium P1/P2", + "Mcas": "Microsoft Defender for Cloud Apps", + "Aatp": "Microsoft Defender for Identity", + "Mdatp": "Microsoft Defender for Endpoint", + "Mtp": "Microsoft Threat Protection", + "IoT": "Azure IoT" + } + for license in licenses: + if isinstance(license, str): + license_name = license_names.get(license, license) + lines.append(f"- {license_name}\n") + + # Tenant permissions + tenant = permissions.get("tenant", []) + if isinstance(tenant, list) and tenant: + if lines: + lines.append("\n") + lines.append("**Tenant Permissions:**\n") + tenant_roles = ", ".join(tenant) + lines.append(f"Requires {tenant_roles} on the workspace's tenant\n") + + return "".join(lines).strip() + + def generate_index_page(solutions: Dict[str, List[Dict[str, str]]], output_dir: Path) -> None: """Generate the main index page with table of all solutions.""" @@ -36,7 +654,26 @@ def generate_index_page(solutions: Dict[str, List[Dict[str, str]]], output_dir: f.write("---\n\n") f.write("## Overview\n\n") - f.write(f"This documentation covers **{len(solutions)} solutions** with data connectors, ") + + # Count solutions with connectors (solutions that have at least one row with non-empty connector_id) + solutions_with_connectors = 0 + for connectors in solutions.values(): + # A solution has a connector if at least one of its rows has a non-empty connector_id + has_connector = False + for conn in connectors: + connector_id = conn.get('connector_id', '') + # Handle both empty strings and 'nan' string values + if connector_id and str(connector_id).strip() and str(connector_id).strip().lower() != 'nan': + has_connector = True + break + if has_connector: + 
solutions_with_connectors += 1 + + f.write(f"This documentation covers **{len(solutions)} solutions**, ") + if solutions_with_connectors == len(solutions): + f.write(f"all of which include data connectors, ") + else: + f.write(f"of which **{solutions_with_connectors}** include data connectors, ") # Count unique connectors across all solutions all_connector_ids = set() @@ -62,8 +699,9 @@ def generate_index_page(solutions: Dict[str, List[Dict[str, str]]], output_dir: f.write("| Metric | Count |\n") f.write("|--------|-------|\n") f.write(f"| Total Solutions | {len(solutions)} |\n") - f.write(f"| Unique Connectors | {len(all_connector_ids)} |\\n") - f.write(f"| Unique Tables | {len(all_tables)} |\\n\\n") + f.write(f"| Solutions with Connectors | {solutions_with_connectors} ({100*solutions_with_connectors//len(solutions)}%) |\n") + f.write(f"| Unique Connectors | {len(all_connector_ids)} |\n") + f.write(f"| Unique Tables | {len(all_tables)} |\n\n") # Organization section f.write("## How This Documentation is Organized\\n\\n") @@ -316,30 +954,113 @@ def generate_tables_index(solutions: Dict[str, List[Dict[str, str]]], output_dir for table in sorted(by_letter[letter]): info = tables_map[table] num_solutions = len(info['solutions']) + num_connectors = len(info['connectors']) - # Create links to first few solutions - solution_links = [] - for solution_name in sorted(info['solutions'])[:3]: - solution_links.append(f"[{solution_name}](solutions/{sanitize_anchor(solution_name)}.md)") + # Determine if we should create individual table page + has_table_page = num_solutions > 1 or num_connectors > 1 - solutions_cell = ", ".join(solution_links) - if num_solutions > 3: - solutions_cell += f" +{num_solutions - 3} more" + # Table name cell with optional link + if has_table_page: + table_cell = f"[`{table}`](tables/{sanitize_anchor(table)}.md)" + else: + table_cell = f"`{table}`" - # Create links to connectors - connector_links = [] - for connector_id, connector_title in 
sorted(info['connectors'])[:5]: - connector_links.append(f"[{connector_title}](connectors/{sanitize_anchor(connector_id)}.md)") + # Solutions cell - limit to 3 items, link to table page for more + if num_solutions == 1: + solution_name = list(info['solutions'])[0] + solutions_cell = f"[{solution_name}](solutions/{sanitize_anchor(solution_name)}.md)" + elif num_solutions <= 3: + solution_links = [] + for solution_name in sorted(info['solutions']): + solution_links.append(f"[{solution_name}](solutions/{sanitize_anchor(solution_name)}.md)") + solutions_cell = ", ".join(solution_links) + else: + solution_links = [] + for solution_name in sorted(info['solutions'])[:3]: + solution_links.append(f"[{solution_name}](solutions/{sanitize_anchor(solution_name)}.md)") + more_link = f"[+{num_solutions - 3} more](tables/{sanitize_anchor(table)}.md)" + solutions_cell = ", ".join(solution_links) + " " + more_link - connectors_cell = ", ".join(connector_links) - if len(info['connectors']) > 5: - connectors_cell += f" +{len(info['connectors']) - 5} more" + # Connectors cell - limit to 5 items, link to table page for more + if num_connectors == 1: + connector_id, connector_title = list(info['connectors'])[0] + connectors_cell = f"[{connector_title}](connectors/{sanitize_anchor(connector_id)}.md)" + elif num_connectors <= 5: + connector_links = [] + for connector_id, connector_title in sorted(info['connectors']): + connector_links.append(f"[{connector_title}](connectors/{sanitize_anchor(connector_id)}.md)") + connectors_cell = ", ".join(connector_links) + else: + connector_links = [] + for connector_id, connector_title in sorted(info['connectors'])[:5]: + connector_links.append(f"[{connector_title}](connectors/{sanitize_anchor(connector_id)}.md)") + more_link = f"[+{num_connectors - 5} more](tables/{sanitize_anchor(table)}.md)" + connectors_cell = ", ".join(connector_links) + " " + more_link - f.write(f"| `{table}` | {solutions_cell} | {connectors_cell} |\n") + f.write(f"| {table_cell} 
| {solutions_cell} | {connectors_cell} |\n") f.write("\n") print(f"Generated tables index: {index_path}") + + # Return tables_map for use in generating table pages + return tables_map + + +def generate_table_pages(tables_map: Dict[str, Dict[str, any]], output_dir: Path) -> None: + """Generate individual table documentation pages for tables with multiple solutions or connectors.""" + + table_dir = output_dir / "tables" + table_dir.mkdir(parents=True, exist_ok=True) + + pages_created = 0 + + for table, info in sorted(tables_map.items()): + num_solutions = len(info['solutions']) + num_connectors = len(info['connectors']) + + # Only create pages for tables with multiple solutions or connectors + if num_solutions <= 1 and num_connectors <= 1: + continue + + table_path = table_dir / f"{sanitize_anchor(table)}.md" + + with table_path.open("w", encoding="utf-8") as f: + f.write(f"# {table}\n\n") + + # Overview + f.write(f"**Table:** `{table}`\n\n") + f.write(f"This table is ingested by **{num_solutions} solution(s)** using **{num_connectors} connector(s)**.\n\n") + + if info.get('is_unique', False): + f.write("⚠️ **Note:** This table name is unique to specific connectors.\n\n") + + f.write("---\n\n") + + # Solutions section + f.write(f"## Solutions ({num_solutions})\n\n") + f.write("This table is used by the following solutions:\n\n") + for solution_name in sorted(info['solutions']): + f.write(f"- [{solution_name}](../solutions/{sanitize_anchor(solution_name)}.md)\n") + f.write("\n") + + # Connectors section + f.write(f"## Connectors ({num_connectors})\n\n") + f.write("This table is ingested by the following connectors:\n\n") + for connector_id, connector_title in sorted(info['connectors']): + f.write(f"- [{connector_title}](../connectors/{sanitize_anchor(connector_id)}.md)\n") + f.write("\n") + + # Navigation + f.write("---\n\n") + f.write("**Browse:**\n\n") + f.write("- [← Back to Tables Index](../tables-index.md)\n") + f.write("- [Solutions 
Index](../solutions-index.md)\n") + f.write("- [Connectors Index](../connectors-index.md)\n") + + pages_created += 1 + + print(f"Generated {pages_created} individual table pages") def generate_connector_pages(solutions: Dict[str, List[Dict[str, str]]], output_dir: Path) -> None: @@ -409,6 +1130,21 @@ def generate_connector_pages(solutions: Dict[str, List[Dict[str, str]]], output_ description = description.replace('
', '\n\n') f.write(f"{description}\n\n") + # Permissions section + permissions = first_entry.get('connector_permissions', '') + if permissions: + f.write("## Permissions\n\n") + formatted_permissions = format_permissions(permissions) + f.write(f"{formatted_permissions}\n\n") + + # Setup Instructions section + instruction_steps = first_entry.get('connector_instruction_steps', '') + if instruction_steps: + f.write("## Setup Instructions\n\n") + f.write("> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.\n\n") + formatted_instructions = format_instruction_steps(instruction_steps) + f.write(f"{formatted_instructions}\n\n") + # Back navigation f.write("[← Back to Connectors Index](../connectors-index.md)\n") @@ -505,6 +1241,21 @@ def generate_solution_page(solution_name: str, connectors: List[Dict[str, str]], description = description.replace('
', '\n\n') f.write(f"{description}\n\n") + # Permissions section + permissions = first_conn.get('connector_permissions', '') + if permissions: + f.write("**Permissions:**\n\n") + formatted_permissions = format_permissions(permissions) + f.write(f"{formatted_permissions}\n\n") + + # Setup Instructions section + instruction_steps = first_conn.get('connector_instruction_steps', '') + if instruction_steps: + f.write("**Setup Instructions:**\n\n") + f.write("> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal.\n\n") + formatted_instructions = format_instruction_steps(instruction_steps) + f.write(f"{formatted_instructions}\n\n") + # Combined table for Tables Ingested and Connector Definition Files tables = sorted(set(conn['Table'] for conn in conn_entries)) connector_files = first_conn.get('connector_files', '') @@ -630,7 +1381,10 @@ def main() -> None: # Generate index pages generate_index_page(by_solution, args.output_dir) generate_connectors_index(by_solution, args.output_dir) - generate_tables_index(by_solution, args.output_dir) + tables_map = generate_tables_index(by_solution, args.output_dir) + + # Generate individual table pages + generate_table_pages(tables_map, args.output_dir) # Generate individual connector pages generate_connector_pages(by_solution, args.output_dir) @@ -639,7 +1393,7 @@ def main() -> None: for solution_name, connectors in sorted(by_solution.items()): generate_solution_page(solution_name, connectors, args.output_dir) - # Count unique connectors + # Count unique connectors and tables all_connector_ids = set() for connectors in by_solution.values(): for conn in connectors: @@ -647,12 +1401,16 @@ def main() -> None: if connector_id: all_connector_ids.add(connector_id) + # Count table pages created + table_pages_count = sum(1 for t in tables_map.values() if len(t['solutions']) 
> 1 or len(t['connectors']) > 1) + print(f"\nDocumentation generated successfully in: {args.output_dir}") print(f" - Solutions index: {args.output_dir / 'solutions-index.md'}") print(f" - Connectors index: {args.output_dir / 'connectors-index.md'}") print(f" - Tables index: {args.output_dir / 'tables-index.md'}") print(f" - Solutions: {args.output_dir / 'solutions'}/ ({len(by_solution)} files)") print(f" - Connectors: {args.output_dir / 'connectors'}/ ({len(all_connector_ids)} files)") + print(f" - Tables: {args.output_dir / 'tables'}/ ({table_pages_count} files)") if __name__ == "__main__": diff --git a/Tools/Solutions Analyzer/solution_connector_tables.py b/Tools/Solutions Analyzer/solution_connector_tables.py index de1698f79aa..6b205510284 100644 --- a/Tools/Solutions Analyzer/solution_connector_tables.py +++ b/Tools/Solutions Analyzer/solution_connector_tables.py @@ -538,8 +538,9 @@ def walk(obj: Any, key_path: Tuple[Any, ...] = ()): # noqa: ANN401 return tables + def find_connector_objects(data: Any) -> List[Dict[str, Any]]: - """Find connector objects and extract description if present.""" + """Find connector objects and extract description, instructionSteps, and permissions if present.""" connectors: List[Dict[str, Any]] = [] stack = [data] while stack: @@ -555,10 +556,16 @@ def find_connector_objects(data: Any) -> List[Dict[str, Any]]: and isinstance(title_value, str) and not any("[variables(" in value.lower() for value in (id_value, publisher_value, title_value)) ): - # Extract description if available + # Extract description, instructionSteps, and permissions if available connector_copy = current.copy() if "descriptionMarkdown" in current: connector_copy["description"] = current["descriptionMarkdown"] + if "instructionSteps" in current: + # Store instructionSteps as JSON-encoded string + connector_copy["instructionSteps"] = json.dumps(current["instructionSteps"]) + if "permissions" in current: + # Store permissions as JSON-encoded string + 
connector_copy["permissions"] = json.dumps(current["permissions"]) connectors.append(connector_copy) stack.extend(current.values()) elif isinstance(current, list): @@ -931,6 +938,8 @@ def main() -> None: connector_title = entry.get("title", "") # Replace newlines with
for GitHub CSV rendering connector_description = entry.get("description", "").replace("\n", "
").replace("\r", "") + connector_instruction_steps = entry.get("instructionSteps", "") + connector_permissions = entry.get("permissions", "") had_table_definitions = had_raw_table_definitions parser_filtered_tables: Set[str] = set() parser_expansion_details: Dict[str, Set[str]] = {} @@ -1035,6 +1044,8 @@ def main() -> None: connector_publisher, connector_title, connector_description, + connector_instruction_steps, + connector_permissions, table_name, ) combo_key = (solution_info["solution_name"], connector_id, table_name) @@ -1146,6 +1157,8 @@ def main() -> None: "", # connector_publisher "", # connector_title "", # connector_description + "", # connector_instruction_steps + "", # connector_permissions "", # table_name ) grouped_rows[row_key] = {} # Empty file map for solutions without connectors @@ -1153,7 +1166,7 @@ def main() -> None: rows: List[Dict[str, str]] = [] for row_key in sorted(grouped_rows.keys()): path_map = grouped_rows[row_key] - combo_key = (row_key[0], row_key[12], row_key[16]) + combo_key = (row_key[0], row_key[12], row_key[18]) non_azure_files = sorted([path for path, is_azure in path_map.items() if not is_azure]) if non_azure_files: file_list = non_azure_files @@ -1174,7 +1187,7 @@ def main() -> None: support_info = row_key_metadata.get(row_key, {"table_detection_methods": set()}) row_data = { - "Table": row_key[16], + "Table": row_key[18], "solution_name": row_key[0], "solution_folder": f"{GITHUB_REPO_URL}/Solutions/{quote(row_key[1])}", "solution_publisher_id": row_key[2], @@ -1191,6 +1204,8 @@ def main() -> None: "connector_publisher": row_key[13], "connector_title": row_key[14], "connector_description": row_key[15], + "connector_instruction_steps": row_key[16], + "connector_permissions": row_key[17], "connector_files": ";".join(github_urls), "is_unique": "true" if len(file_list) == 1 else "false", } @@ -1220,6 +1235,8 @@ def main() -> None: "connector_publisher", "connector_title", "connector_description", + "connector_instruction_steps", 
+ "connector_permissions", "connector_files", "is_unique", ] diff --git a/Tools/Solutions Analyzer/solutions_connectors_tables_issues_and_exceptions_report.csv b/Tools/Solutions Analyzer/solutions_connectors_tables_issues_and_exceptions_report.csv index f546ba709f8..c50d4b7bff6 100644 --- a/Tools/Solutions Analyzer/solutions_connectors_tables_issues_and_exceptions_report.csv +++ b/Tools/Solutions Analyzer/solutions_connectors_tables_issues_and_exceptions_report.csv @@ -1,7 +1,43 @@ -"solution_name","solution_folder","connector_id","connector_title","connector_publisher","connector_file","reason","details" -"Azure Resource Graph","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Resource%20Graph","AzureResourceGraph","Azure Resource Graph","Microsoft","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Resource%20Graph/Data Connectors/AzureResourceGraph_DataConnectorDefinition.json","no_table_definitions","Connector definition did not expose any table tokens." -"CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." -"Microsoft 365 Assets","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365%20Assets","M365Assets","Microsoft 365 Assets (formerly, Office 365)","Microsoft","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365%20Assets/Data Connectors/M365Asset_DataConnectorDefinition.json","no_table_definitions","Connector definition did not expose any table tokens." 
-"Microsoft Entra ID Assets","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Assets","EntraIDAssets","Microsoft Entra ID Assets","Microsoft","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Assets/Data Connectors/EntraIDAssets_DataConnectorDefinition.json","no_table_definitions","Connector definition did not expose any table tokens." -"Palo Alto - XDR (Cortex)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20-%20XDR%20%28Cortex%29","","","","","missing_solution_metadata","Solution contains connectors but is missing SolutionMetadata.json." -"TenableAD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableAD","","","","","missing_solution_metadata","Solution contains connectors but is missing SolutionMetadata.json." +"solution_name","solution_folder","connector_id","connector_title","connector_publisher","connector_file","reason","details" +"Azure Batch Account","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Batch%20Account","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Azure Cognitive Search","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cognitive%20Search","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Azure Data Lake Storage Gen1","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Data%20Lake%20Storage%20Gen1","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Azure DDoS Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20DDoS%20Protection","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." 
+"Azure Event Hubs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Event%20Hubs","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Azure Key Vault","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Key%20Vault","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Azure kubernetes Service","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20kubernetes%20Service","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Azure Logic Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Logic%20Apps","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Azure Network Security Groups","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Network%20Security%20Groups","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Azure Resource Graph","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Resource%20Graph","AzureResourceGraph","Azure Resource Graph","Microsoft","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Resource%20Graph/Data Connectors/AzureResourceGraph_DataConnectorDefinition.json","no_table_definitions","Connector definition did not expose any table tokens." +"Azure Service Bus","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Service%20Bus","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." 
+"Azure SQL Database solution for sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20SQL%20Database%20solution%20for%20sentinel","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Azure Storage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Storage","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Azure Stream Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Stream%20Analytics","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Azure Web Application Firewall (WAF)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Web%20Application%20Firewall%20%28WAF%29","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Blackberry CylancePROTECT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Blackberry%20CylancePROTECT","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"CiscoASA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoASA","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Common Event Format","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Common%20Event%20Format","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." 
+"Google Cloud Platform Firewall Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Firewall%20Logs","","","","","missing_solution_metadata","Solution contains connectors but is missing SolutionMetadata.json." +"Microsoft 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Microsoft 365 Assets","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365%20Assets","M365Assets","Microsoft 365 Assets (formerly, Office 365)","Microsoft","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365%20Assets/Data Connectors/M365Asset_DataConnectorDefinition.json","no_table_definitions","Connector definition did not expose any table tokens." +"Microsoft Defender for Cloud Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Microsoft Defender For Identity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20For%20Identity","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." 
+"Microsoft Entra ID Assets","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Assets","EntraIDAssets","Microsoft Entra ID Assets","Microsoft","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Assets/Data Connectors/EntraIDAssets_DataConnectorDefinition.json","no_table_definitions","Connector definition did not expose any table tokens." +"Microsoft Entra ID Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Protection","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Microsoft Project","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Project","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"MicrosoftDefenderForEndpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftDefenderForEndpoint","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"MicrosoftPurviewInsiderRiskManagement","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftPurviewInsiderRiskManagement","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"OneIdentity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneIdentity","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Palo Alto - XDR (Cortex)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20-%20XDR%20%28Cortex%29","","","","","missing_solution_metadata","Solution contains connectors but is missing SolutionMetadata.json." 
+"Symantec Integrated Cyber Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Integrated%20Cyber%20Defense","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"TenableAD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableAD","","","","","missing_solution_metadata","Solution contains connectors but is missing SolutionMetadata.json." +"TransmitSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TransmitSecurity","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Windows Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Firewall","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Windows Forwarded Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Forwarded%20Events","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Windows Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Security%20Events","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Windows Server DNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Server%20DNS","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." +"Zscaler Internet Access","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Internet%20Access","","","","","missing_connector_json","Data Connectors folder exists but contains no readable connector JSON files." 
diff --git a/Tools/Solutions Analyzer/solutions_connectors_tables_mapping.csv b/Tools/Solutions Analyzer/solutions_connectors_tables_mapping.csv index f3034a25cf2..212339182d0 100644 --- a/Tools/Solutions Analyzer/solutions_connectors_tables_mapping.csv +++ b/Tools/Solutions Analyzer/solutions_connectors_tables_mapping.csv @@ -1,1163 +1,1096 @@ -"Table","solution_name","solution_folder","solution_publisher_id","solution_offer_id","solution_first_publish_date","solution_last_publish_date","solution_version","solution_support_name","solution_support_tier","solution_support_link","solution_author_name","solution_categories","connector_id","connector_publisher","connector_title","connector_description","connector_files","is_unique" -"OnePasswordEventLogs_CL","1Password","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password","1password1617200969773","azure-sentinel-solution-1password","2023-12-01","","","1Password","Partner","https://support.1password.com/","","domains","1Password","1Password","1Password","The [1Password](https://www.1password.com) solution for Microsoft Sentinel enables you to ingest 1Password logs and events into Microsoft Sentinel. The connector provides visibility into 1Password Events and Alerts in Microsoft Sentinel to improve monitoring and investigation capabilities.

**Underlying Microsoft Technologies used:**

This solution takes a dependency on the following technologies, and some of these dependencies either may be in [Preview](https://azure.microsoft.com/support/legal/preview-supplemental-terms/) state or might result in additional ingestion or operational costs:

- [Azure Functions](https://azure.microsoft.com/services/functions/#overview)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password/Data%20Connectors/deployment/1Password_data_connector.json","true" -"OnePasswordEventLogs_CL","1Password","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password","1password1617200969773","azure-sentinel-solution-1password","2023-12-01","","","1Password","Partner","https://support.1password.com/","","domains","1Password","1Password","1Password","The [1Password](https://www.1password.com) solution for Microsoft Sentinel enables you to ingest sign-in attempts, item usage, and audit events from your 1Password Business account using the [1Password Events Reporting API](https://developer.1password.com/docs/events-api). This allows you to monitor and investigate events in 1Password in Microsoft Sentinel along with the other applications and services your organization uses.

**Underlying Microsoft Technologies used:**

This solution depends on the following technologies, and some of which may be in [Preview](https://azure.microsoft.com/support/legal/preview-supplemental-terms/) state or may incur additional ingestion or operational costs:

- [Azure Functions](https://azure.microsoft.com/services/functions/#overview)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password/Data%20Connectors/1Password_API_FunctionApp.json","true" -"OnePasswordEventLogs_CL","1Password","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password","1password1617200969773","azure-sentinel-solution-1password","2023-12-01","","","1Password","Partner","https://support.1password.com/","","domains","1PasswordCCPDefinition","1Password","1Password (Serverless)","The 1Password CCP connector allows the user to ingest 1Password Audit, Signin & ItemUsage events into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password/Data%20Connectors/1Password_ccpv2/1Password_DataConnectorDefinition.json","true" -"apifirewall_log_1_CL","42Crunch API Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/42Crunch%20API%20Protection","42crunch1580391915541","42crunch_sentinel_solution","2022-09-21","","","42Crunch API Protection","Partner","https://42crunch.com/","","domains","42CrunchAPIProtection","42Crunch","API Protection","Connects the 42Crunch API protection to Azure Log Analytics via the REST API interface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/42Crunch%20API%20Protection/Data%20Connectors/42CrunchAPIProtection.json","true" -"CommonSecurityLog","AI Analyst Darktrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace","darktrace1655286944672","darktrace_mss","2022-05-02","","","Darktrace","Partner","https://www.darktrace.com/en/contact/","","domains","Darktrace","Darktrace","[Deprecated] AI Analyst Darktrace via Legacy Agent","The Darktrace connector lets users connect Darktrace Model Breaches in real-time with Microsoft Sentinel, allowing creation of custom Dashboards, Workbooks, Notebooks and Custom Alerts to improve investigation. 
Microsoft Sentinel's enhanced visibility into Darktrace logs enables monitoring and mitigation of security threats.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace/Data%20Connectors/AIA-Darktrace.json","true" -"CommonSecurityLog","AI Analyst Darktrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace","darktrace1655286944672","darktrace_mss","2022-05-02","","","Darktrace","Partner","https://www.darktrace.com/en/contact/","","domains","DarktraceAma","Darktrace","[Deprecated] AI Analyst Darktrace via AMA","The Darktrace connector lets users connect Darktrace Model Breaches in real-time with Microsoft Sentinel, allowing creation of custom Dashboards, Workbooks, Notebooks and Custom Alerts to improve investigation. Microsoft Sentinel's enhanced visibility into Darktrace logs enables monitoring and mitigation of security threats.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace/Data%20Connectors/template_AIA-DarktraceAMA.json","true" -"AIShield_CL","AIShield AI Security Monitoring","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AIShield%20AI%20Security%20Monitoring","rbei","bgsw_aishield_sentinel","2022-01-11","2025-03-06","","AIShield","Partner","https://azuremarketplace.microsoft.com/marketplace/apps/rbei.bgsw_aishield_product/","","domains","BoschAIShield","Bosch","AIShield","[AIShield](https://www.boschaishield.com/) connector allows users to connect with AIShield custom defense mechanism logs with Microsoft Sentinel, allowing the creation of dynamic Dashboards, Workbooks, Notebooks and tailored Alerts to improve investigation and thwart attacks on AI systems. 
It gives users more insight into their organization's AI assets security posturing and improves their AI systems security operation capabilities.AIShield.GuArdIan analyzes the LLM generated content to identify and mitigate harmful content, safeguarding against legal, policy, role based, and usage based violations","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AIShield%20AI%20Security%20Monitoring/Data%20Connectors/AIShieldConnector.json","true" -"Event","ALC-WebCTRL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ALC-WebCTRL","azuresentinel","azure-sentinel-solution-automated-logic-webctrl","2021-11-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AutomatedLogicWebCTRL","AutomatedLogic","Automated Logic WebCTRL ","You can stream the audit logs from the WebCTRL SQL server hosted on Windows machines connected to your Microsoft Sentinel. This connection enables you to view dashboards, create custom alerts and improve investigation. This gives insights into your Industrial Control Systems that are monitored or controlled by the WebCTRL BAS application.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ALC-WebCTRL/Data%20Connectors/Connector_WindowsEvents_WebCTRL.json","true" -"ARGOS_CL","ARGOSCloudSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ARGOSCloudSecurity","argoscloudsecurity1605618416175","argos-sentinel","2022-08-16","","","ARGOS Cloud Security","Partner","https://argos-security.io/contact-us","","domains","ARGOSCloudSecurity","ARGOS Cloud Security","ARGOS Cloud Security","The ARGOS Cloud Security integration for Microsoft Sentinel allows you to have all your important cloud security events in one place. This enables you to easily create dashboards, alerts, and correlate events across multiple systems. 
Overall this will improve your organization's security posture and security incident response.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ARGOSCloudSecurity/Data%20Connectors/Connector_ARGOS.json","true" -"AWSCloudFront_AccessLog_CL","AWS CloudFront","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20CloudFront","azuresentinel","azure-sentinel-solution-aws-cloudfront","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsCloudfrontCcpDefinition","Microsoft","Amazon Web Services CloudFront (via Codeless Connector Framework) (Preview)","This data connector enables the integration of AWS CloudFront logs with Microsoft Sentinel to support advanced threat detection, investigation, and security monitoring. By utilizing Amazon S3 for log storage and Amazon SQS for message queuing, the connector reliably ingests CloudFront access logs into Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20CloudFront/Data%20Connectors/AWSCloudFrontLog_CCF/AWSCloudFrontLog_ConnectorDefinition.json","true" -"AWSSecurityHubFindings","AWS Security Hub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20Security%20Hub","azuresentinel","azure-sentinel-solution-awssecurityhub","2025-03-12","2025-03-12","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsSecurityHubFindingsCcpDefinition","Microsoft","AWS Security Hub Findings (via Codeless Connector Framework)","This connector enables the ingestion of AWS Security Hub Findings, which are collected in AWS S3 buckets, into Microsoft Sentinel. 
It helps streamline the process of monitoring and managing security alerts by integrating AWS Security Hub Findings with Microsoft Sentinel's advanced threat detection and response capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20Security%20Hub/Data%20Connectors/AWSSecurityHubFindings_CCP/AWSSecurityHubFindings_DataConnectorDefinition.json","true" -"","AWS Systems Manager","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20Systems%20Manager","azuresentinel","azure-sentinel-solution-awssystemsmanager","","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"AWSVPCFlow","AWS VPC Flow Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20VPC%20Flow%20Logs","azuresentinel","azure-sentinel-solution-awsvpcflowlogs","2025-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AWSS3VPCFlowLogsParquetDefinition","Microsoft","Amazon Web Services S3 VPC Flow Logs","This connector allows you to ingest AWS VPC Flow Logs, collected in AWS S3 buckets, to Microsoft Sentinel. 
AWS VPC Flow Logs provide visibility into network traffic within your AWS Virtual Private Cloud (VPC), enabling security analysis and network monitoring.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20VPC%20Flow%20Logs/Data%20Connectors/AWSVPCFlowLogs_CCP/AWSVPCFlowLogs_DataConnectorDefinition.json","true" -"","AWSAthena","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWSAthena","azuresentinel","azure-sentinel-solution-awsathena","2022-11-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"AWSS3ServerAccess","AWS_AccessLogs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS_AccessLogs","azuresentinel","azure-sentinel-solution-awsaccesslogs","2025-02-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3ServerAccessLogsDefinition","Microsoft","AWS S3 Server Access Logs (via Codeless Connector Framework)","This connector allows you to ingest AWS S3 Server Access Logs into Microsoft Sentinel. These logs contain detailed records for requests made to S3 buckets, including the type of request, resource accessed, requester information, and response details. 
These logs are useful for analyzing access patterns, debugging issues, and ensuring security compliance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS_AccessLogs/Data%20Connectors/AwsS3ServerAccessLogsDefinition_CCP/AWSS3ServerAccessLogs_ConnectorDefinition.json","true" -"","AWS_IAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS_IAM","azuresentinel","azure-sentinel-solution-amazonwebservicesiam","2022-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"ABNORMAL_CASES_CL","AbnormalSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity","abnormalsecuritycorporation1593011233180","fe1b4806-215b-4610-bf95-965a7a65579c","2021-10-20","","","Abnormal Security","Partner","https://abnormalsecurity.com/contact","","domains","AbnormalSecurity","AbnormalSecurity","AbnormalSecurity ","The Abnormal Security data connector provides the capability to ingest threat and case logs into Microsoft Sentinel using the [Abnormal Security Rest API.](https://app.swaggerhub.com/apis/abnormal-security/abx/)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity/Data%20Connectors/AbnormalSecurity_API_FunctionApp.json","true" -"ABNORMAL_THREAT_MESSAGES_CL","AbnormalSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity","abnormalsecuritycorporation1593011233180","fe1b4806-215b-4610-bf95-965a7a65579c","2021-10-20","","","Abnormal Security","Partner","https://abnormalsecurity.com/contact","","domains","AbnormalSecurity","AbnormalSecurity","AbnormalSecurity ","The Abnormal Security data connector provides the capability to ingest threat and case logs into Microsoft Sentinel using the [Abnormal Security Rest 
API.](https://app.swaggerhub.com/apis/abnormal-security/abx/)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity/Data%20Connectors/AbnormalSecurity_API_FunctionApp.json","true" -"","AbuseIPDB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbuseIPDB","azuresentinel","azure-sentinel-solution-abuseipdb","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"","Acronis Cyber Protect Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Acronis%20Cyber%20Protect%20Cloud","acronisinternationalgmbh","azure-sentinel-solution-acronis-cyber-protect","2025-10-28","2025-10-28","","Acronis International GmbH","Partner","https://www.acronis.com/en/support","","domains,verticals","","","","","","false" -"agari_apdpolicy_log_CL","Agari","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari","agari","Agari_MSS","2022-05-02","","","Agari","Partner","https://support.agari.com/hc/en-us/articles/360000645632-How-to-access-Agari-Support","","domains","Agari","Agari","Agari Phishing Defense and Brand Protection","This connector uses a Agari REST API connection to push data into Azure Sentinel Log Analytics.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari/Data%20Connectors/Agari_API_FunctionApp.json","true" -"agari_apdtc_log_CL","Agari","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari","agari","Agari_MSS","2022-05-02","","","Agari","Partner","https://support.agari.com/hc/en-us/articles/360000645632-How-to-access-Agari-Support","","domains","Agari","Agari","Agari Phishing Defense and Brand Protection","This connector uses a Agari REST API connection to push data into Azure Sentinel Log Analytics.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari/Data%20Connectors/Agari_API_FunctionApp.json","true" 
-"agari_bpalerts_log_CL","Agari","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari","agari","Agari_MSS","2022-05-02","","","Agari","Partner","https://support.agari.com/hc/en-us/articles/360000645632-How-to-access-Agari-Support","","domains","Agari","Agari","Agari Phishing Defense and Brand Protection","This connector uses a Agari REST API connection to push data into Azure Sentinel Log Analytics.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari/Data%20Connectors/Agari_API_FunctionApp.json","true" -"InfoSecAnalytics_CL","AgileSec Analytics Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AgileSec%20Analytics%20Connector","infosecglobal1632846037582","agilesec-analytics-connector","","","","InfoSecGlobal","Partner","https://www.infosecglobal.com/","","domains","InfoSecDataConnector","InfoSecGlobal","InfoSecGlobal Data Connector","Use this data connector to integrate with InfoSec Crypto Analytics and get data sent directly to Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AgileSec%20Analytics%20Connector/Data%20Connectors/Connector_Analytics_InfoSec.json","true" -"CommonSecurityLog","Akamai Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events","azuresentinel","azure-sentinel-solution-akamai","2022-03-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AkamaiSecurityEvents","Akamai","[Deprecated] Akamai Security Events via Legacy Agent","Akamai Solution for Microsoft Sentinel provides the capability to ingest [Akamai Security Events](https://www.akamai.com/us/en/products/security/) into Microsoft Sentinel. 
Refer to [Akamai SIEM Integration documentation](https://developer.akamai.com/tools/integrations/siem) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events/Data%20Connectors/Connector_CEF_Akamai.json","true" -"CommonSecurityLog","Akamai Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events","azuresentinel","azure-sentinel-solution-akamai","2022-03-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AkamaiSecurityEventsAma","Akamai","[Deprecated] Akamai Security Events via AMA","Akamai Solution for Microsoft Sentinel provides the capability to ingest [Akamai Security Events](https://www.akamai.com/us/en/products/security/) into Microsoft Sentinel. Refer to [Akamai SIEM Integration documentation](https://developer.akamai.com/tools/integrations/siem) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events/Data%20Connectors/template_AkamaiSecurityEventsAMA.json","true" -"","Alibaba Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alibaba%20Cloud","azuresentinel","azure-sentinel-solution-alibabacloud","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"AliCloudActionTrailLogs_CL","Alibaba Cloud ActionTrail","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alibaba%20Cloud%20ActionTrail","azuresentinel","azure-sentinel-solution-alibabacloud-actiontrail","2025-07-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AliCloudActionTrailCCPDefinition","Microsoft","Alibaba Cloud ActionTrail (via Codeless Connector Framework)","The [Alibaba Cloud ActionTrail](https://www.alibabacloud.com/product/actiontrail) data connector provides the capability to retrieve actiontrail events stored into [Alibaba Cloud Simple Log 
Service](https://www.alibabacloud.com/product/log-service) and store them into Microsoft Sentinel through the [SLS REST API](https://www.alibabacloud.com/help/sls/developer-reference/api-sls-2020-12-30-getlogs). The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alibaba%20Cloud%20ActionTrail/Data%20Connectors/AliCloudCloudTrailConnector_CCP/AliCloudActionTrail_DataConnectorDefinition.json","true" -"AlsidForADLog_CL","Alsid For AD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alsid%20For%20AD","alsid1603447574634","Alsid_For_AD_MSS","2022-05-06","","","Alsid","Partner","https://www.alsid.com/contact-us/","","domains","AlsidForAD","Alsid","Alsid for Active Directory","Alsid for Active Directory connector allows to export Alsid Indicators of Exposures, trailflow and Indicators of Attacks logs to Azure Sentinel in real time.
It provides a data parser to manipulate the logs more easily. The different workbooks ease your Active Directory monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alsid%20For%20AD/Data%20Connectors/AlsidForAD.json","true" -"AWSCloudTrail","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AWS","Amazon","Amazon Web Services","Follow these instructions to connect to AWS and stream your CloudTrail logs into Microsoft Sentinel. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AWS.json","true" -"AWSCloudTrail","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" -"AWSCloudWatch","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" -"AWSGuardDuty","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" -"AWSVPCFlow","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" -"AWSWAF","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3WafCcpDefinition","Microsoft","Amazon Web Services S3 WAF","This connector allows you to ingest AWS WAF logs, collected in AWS S3 buckets, to Microsoft Sentinel. AWS WAF logs are detailed records of traffic that web access control lists (ACLs) analyze, which are essential for maintaining the security and performance of web applications. These logs contain information such as the time AWS WAF received the request, the specifics of the request, and the action taken by the rule that the request matched.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/AWS_WAF_CCP/AwsS3_WAF_DataConnectorDefinition.json","true" -"AWSNetworkFirewallAlert","Amazon Web Services NetworkFirewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall","azuresentinel","azure-sentinel-solution-aws-networkfirewall","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsNetworkFirewallCcpDefinition","Microsoft","Amazon Web Services NetworkFirewall (via Codeless Connector Framework)","This data connector allows you to ingest AWS Network Firewall logs into Microsoft Sentinel for advanced threat detection and security monitoring. 
By leveraging Amazon S3 and Amazon SQS, the connector forwards network traffic logs, intrusion detection alerts, and firewall events to Microsoft Sentinel, enabling real-time analysis and correlation with other security data","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/AWSNetworkFirewallLogs_CCP/AWSNetworkFirewallLog_ConnectorDefinition.json","true" -"AWSNetworkFirewallFlow","Amazon Web Services NetworkFirewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall","azuresentinel","azure-sentinel-solution-aws-networkfirewall","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsNetworkFirewallCcpDefinition","Microsoft","Amazon Web Services NetworkFirewall (via Codeless Connector Framework)","This data connector allows you to ingest AWS Network Firewall logs into Microsoft Sentinel for advanced threat detection and security monitoring. 
By leveraging Amazon S3 and Amazon SQS, the connector forwards network traffic logs, intrusion detection alerts, and firewall events to Microsoft Sentinel, enabling real-time analysis and correlation with other security data","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/AWSNetworkFirewallLogs_CCP/AWSNetworkFirewallLog_ConnectorDefinition.json","true" -"AWSNetworkFirewallTls","Amazon Web Services NetworkFirewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall","azuresentinel","azure-sentinel-solution-aws-networkfirewall","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsNetworkFirewallCcpDefinition","Microsoft","Amazon Web Services NetworkFirewall (via Codeless Connector Framework)","This data connector allows you to ingest AWS Network Firewall logs into Microsoft Sentinel for advanced threat detection and security monitoring. 
By leveraging Amazon S3 and Amazon SQS, the connector forwards network traffic logs, intrusion detection alerts, and firewall events to Microsoft Sentinel, enabling real-time analysis and correlation with other security data","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/AWSNetworkFirewallLogs_CCP/AWSNetworkFirewallLog_ConnectorDefinition.json","true" -"AWSRoute53Resolver","Amazon Web Services Route 53","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20Route%2053","azuresentinel","azure-sentinel-solution-amazonwebservicesroute53","2025-03-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AWSRoute53ResolverCCPDefinition","Microsoft","Amazon Web Services S3 DNS Route53 (via Codeless Connector Framework)","This connector enables ingestion of AWS Route 53 DNS logs into Microsoft Sentinel for enhanced visibility and threat detection. It supports DNS Resolver query logs ingested directly from AWS S3 buckets, while Public DNS query logs and Route 53 audit logs can be ingested using Microsoft Sentinel's AWS CloudWatch and CloudTrail connectors. Comprehensive instructions are provided to guide you through the setup of each log type. 
Leverage this connector to monitor DNS activity, detect potential threats, and improve your security posture in cloud environments.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20Route%2053/Data%20Connectors/AWSRoute53Resolver_CCP/AWSRoute53Resolver_DataConnectorDefinition.json","true" -"Anvilogic_Alerts_CL","Anvilogic","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Anvilogic","anvilogic1725900018831","azure-sentinel-solution-anvilogic","2025-06-20","","","Anvilogic","Partner","https://www.anvilogic.com/","","domains","AnvilogicCCFDefinition","Anvilogic","Anvilogic","The Anvilogic data connector allows you to pull events of interest generated in the Anvilogic ADX cluster into your Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Anvilogic/Data%20Connectors/AnviLogic_CCF/Anvilogic_DataConnectorDefinition.json","true" -"","Apache Log4j Vulnerability Detection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Apache%20Log4j%20Vulnerability%20Detection","azuresentinel","azure-sentinel-solution-apachelog4jvulnerability","2021-12-15","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"ApacheHTTPServer_CL","ApacheHTTPServer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ApacheHTTPServer","azuresentinel","azure-sentinel-solution-apachehttpserver","2021-10-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ApacheHTTPServer","Apache","[Deprecated] Apache HTTP Server","The Apache HTTP Server data connector provides the capability to ingest [Apache HTTP Server](http://httpd.apache.org/) events into Microsoft Sentinel. 
Refer to [Apache Logs documentation](https://httpd.apache.org/docs/2.4/logs.html) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ApacheHTTPServer/Data%20Connectors/Connector_ApacheHTTPServer_agent.json","true" -"CommonSecurityLog","AristaAwakeSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AristaAwakeSecurity","arista-networks","awake-security","2021-10-18","","","Arista - Awake Security","Partner","https://awakesecurity.com/","","domains","AristaAwakeSecurity","Arista Networks","[Deprecated] Awake Security via Legacy Agent","The Awake Security CEF connector allows users to send detection model matches from the Awake Security Platform to Microsoft Sentinel. Remediate threats quickly with the power of network detection and response and speed up investigations with deep visibility especially into unmanaged entities including users, devices and applications on your network. The connector also enables the creation of network security-focused custom alerts, incidents, workbooks and notebooks that align with your existing security operations workflows. ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AristaAwakeSecurity/Data%20Connectors/Connector_AristaAwakeSecurity_CEF.json","true" -"Armis_Activities_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisActivities","Armis","Armis Activities","The [Armis](https://www.armis.com/) Activities connector gives the capability to ingest Armis device Activities into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/doc` for more information. The connector provides the ability to get device activity information from the Armis platform. 
Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. Armis detects what all devices are doing in your environment and classifies those activities to get a complete picture of device behavior. These activities are analyzed for an understanding of normal and abnormal device behavior and used to assess device and network risk.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisActivities/ArmisActivities_API_FunctionApp.json","true" -"Armis_Alerts_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisAlerts","Armis","Armis Alerts","The [Armis](https://www.armis.com/) Alerts connector gives the capability to ingest Armis Alerts into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get alert information from the Armis platform and to identify and prioritize threats in your environment. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisAlerts/ArmisAlerts_API_FunctionApp.json","true" -"Armis_Activities_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisAlertsActivities","Armis","Armis Alerts Activities","The [Armis](https://www.armis.com/) Alerts Activities connector gives the capability to ingest Armis Alerts and Activities into Microsoft Sentinel through the Armis REST API. 
Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get alert and activity information from the Armis platform and to identify and prioritize threats in your environment. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisAlertsActivities/ArmisAlertsActivities_API_FunctionApp.json","true" -"Armis_Alerts_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisAlertsActivities","Armis","Armis Alerts Activities","The [Armis](https://www.armis.com/) Alerts Activities connector gives the capability to ingest Armis Alerts and Activities into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get alert and activity information from the Armis platform and to identify and prioritize threats in your environment. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisAlertsActivities/ArmisAlertsActivities_API_FunctionApp.json","true" -"Armis_Devices_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisDevices","Armis","Armis Devices","The [Armis](https://www.armis.com/) Device connector gives the capability to ingest Armis Devices into Microsoft Sentinel through the Armis REST API. 
Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get device information from the Armis platform. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. Armis can also integrate with your existing IT & security management tools to identify and classify each and every device, managed or unmanaged in your environment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisDevice/ArmisDevice_API_FunctionApp.json","true" -"Armorblox_CL","Armorblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armorblox","armorblox1601081599926","armorblox_sentinel_1","2021-10-18","","","Armorblox","Partner","https://www.armorblox.com/contact/","","domains","Armorblox","Armorblox","Armorblox","The [Armorblox](https://www.armorblox.com/) data connector provides the capability to ingest incidents from your Armorblox instance into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armorblox/Data%20Connectors/Armorblox_API_FunctionApp.json","true" -"CommonSecurityLog","Aruba ClearPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass","azuresentinel","azure-sentinel-solution-arubaclearpass","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ArubaClearPass","Aruba Networks","[Deprecated] Aruba ClearPass via Legacy Agent","The [Aruba ClearPass](https://www.arubanetworks.com/products/security/network-access-control/secure-access/) connector allows you to easily connect your Aruba ClearPass with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. 
This gives you more insight into your organization’s network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass/Data%20Connectors/Connector_Syslog_ArubaClearPass.json","true" -"CommonSecurityLog","Aruba ClearPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass","azuresentinel","azure-sentinel-solution-arubaclearpass","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ArubaClearPassAma","Aruba Networks","[Deprecated] Aruba ClearPass via AMA","The [Aruba ClearPass](https://www.arubanetworks.com/products/security/network-access-control/secure-access/) connector allows you to easily connect your Aruba ClearPass with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass/Data%20Connectors/template_ArubaClearPassAMA.json","true" -"AtlassianConfluenceNativePoller_CL","AtlassianConfluenceAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit","azuresentinel","azure-sentinel-solution-atlassianconfluenceaudit","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AtlassianConfluence","Atlassian","Atlassian Confluence","The Atlassian Confluence data connector provides the capability to ingest [Atlassian Confluence audit logs](https://developer.atlassian.com/cloud/confluence/rest/api-group-audit/) into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit/Data%20Connectors/ConfluenceNativePollerConnector/azuredeploy_Confluence_native_poller_connector.json","true" 
-"Confluence_Audit_CL","AtlassianConfluenceAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit","azuresentinel","azure-sentinel-solution-atlassianconfluenceaudit","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ConfluenceAuditAPI","Atlassian","[Deprecated] Atlassian Confluence Audit","The [Atlassian Confluence](https://www.atlassian.com/software/confluence) Audit data connector provides the capability to ingest [Confluence Audit Records](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit/Data%20Connectors/AtlassianConfluenceAuditDataConnector/ConfluenceAudit_API_FunctionApp.json","true" -"ConfluenceAuditLogs_CL","AtlassianConfluenceAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit","azuresentinel","azure-sentinel-solution-atlassianconfluenceaudit","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ConfluenceAuditCCPDefinition","Microsoft"," Atlassian Confluence Audit (via Codeless Connector Framework)","The [Atlassian Confluence](https://www.atlassian.com/software/confluence) Audit data connector provides the capability to ingest [Confluence Audit Records](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) for more information. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit/Data%20Connectors/AtlassianConfluenceAuditLogs_CCP/AtlassianConfluenceAudit_DataConnectorDefinition.json","true" -"Jira_Audit_CL","AtlassianJiraAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit","azuresentinel","azure-sentinel-solution-atlassianjiraaudit","2022-01-10","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JiraAuditAPI","Atlassian","Atlassian Jira Audit","The [Atlassian Jira](https://www.atlassian.com/software/jira) Audit data connector provides the capability to ingest [Jira Audit Records](https://support.atlassian.com/jira-cloud-administration/docs/audit-activities-in-jira-applications/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/) for more information. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit/Data%20Connectors/JiraAudit_API_FunctionApp.json","true" -"Jira_Audit_v2_CL","AtlassianJiraAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit","azuresentinel","azure-sentinel-solution-atlassianjiraaudit","2022-01-10","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JiraAuditCCPDefinition","Microsoft","Atlassian Jira Audit (using REST API)","The [Atlassian Jira](https://www.atlassian.com/software/jira) Audit data connector provides the capability to ingest [Jira Audit Records](https://support.atlassian.com/jira-cloud-administration/docs/audit-activities-in-jira-applications/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/) for more information. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit/Data%20Connectors/JiraAuditAPISentinelConnector_ccpv2/JiraAudit_DataConnectorDefinition.json","true" -"","Attacker Tools Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Attacker%20Tools%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-attackertools","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"","Australian Cyber Security Centre","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Australian%20Cyber%20Security%20Centre","azuresentinel","azure-sentinel-solution-australiancybersecurity","2022-11-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"Auth0AM_CL","Auth0","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0","azuresentinel","azure-sentinel-solution-auth0","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Auth0","Auth0","Auth0 Access Management","The [Auth0 Access Management](https://auth0.com/access-management) data connector provides the capability to ingest [Auth0 log events](https://auth0.com/docs/api/management/v2/#!/Logs/get_logs) into Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0/Data%20Connectors/Auth0_FunctionApp.json","true" -"Auth0Logs_CL","Auth0","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0","azuresentinel","azure-sentinel-solution-auth0","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Auth0ConnectorCCPDefinition","Microsoft","Auth0 Logs","The 
[Auth0](https://auth0.com/docs/api/management/v2/logs/get-logs) data connector allows ingesting logs from Auth0 API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses Auth0 API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0/Data%20Connectors/Auth0_CCP/DataConnectorDefinition.json","true" -"Authomize_v2_CL","Authomize","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Authomize","authomize","azure-sentinel-solution-authomize","2023-06-15","","","Authomize","Partner","https://support.authomize.com","","domains,verticals","Authomize","Authomize","Authomize Data Connector","The Authomize Data Connector provides the capability to ingest custom log types from Authomize into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Authomize/Data%20Connectors/AuthomizeCustomConnector.json","true" -"AzureActivity","Azure Activity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Activity","azuresentinel","azure-sentinel-solution-azureactivity","2022-04-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActivity","Microsoft","Azure Activity","Azure Activity Log is a subscription log that provides insight into subscription-level events that occur in Azure, including events from Azure Resource Manager operational data, service health events, write operations taken on the resources in your subscription, and the status of activities performed in Azure. 
For more information, see the [Microsoft Sentinel documentation ](https://go.microsoft.com/fwlink/p/?linkid=2219695&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Activity/Data%20Connectors/AzureActivity.json","true" -"AzureDiagnostics","Azure Batch Account","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Batch%20Account","azuresentinel","azure-sentinel-solution-batchaccount","2022-06-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureBatchAccount_CCP","Microsoft","Azure Batch Account","Azure Batch Account is a uniquely identified entity within the Batch service. Most Batch solutions use Azure Storage for storing resource files and output files, so each Batch account is usually associated with a corresponding storage account. This connector lets you stream your Azure Batch account diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2224103&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Batch%20Account/Data%20Connectors/AzureBatchAccount_CCP.JSON","true" -"fluentbit_CL","Azure Cloud NGFW by Palo Alto Networks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cloud%20NGFW%20by%20Palo%20Alto%20Networks","paloaltonetworks","cloudngfw-sentinel-solution","2023-11-03","2023-11-03","","Palo Alto Networks","Partner","https://support.paloaltonetworks.com","","domains","AzureCloudNGFWByPaloAltoNetworks","Palo Alto Networks","Azure CloudNGFW By Palo Alto Networks","Cloud Next-Generation Firewall by Palo Alto Networks - an Azure Native ISV Service - is Palo Alto Networks Next-Generation Firewall (NGFW) delivered as a cloud-native service on Azure. 
You can discover Cloud NGFW in the Azure Marketplace and consume it in your Azure Virtual Networks (VNet). With Cloud NGFW, you can access the core NGFW capabilities such as App-ID, URL filtering based technologies. It provides threat prevention and detection through cloud-delivered security services and threat prevention signatures. The connector allows you to easily connect your Cloud NGFW logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. For more information, see the [Cloud NGFW for Azure documentation](https://docs.paloaltonetworks.com/cloud-ngfw/azure).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cloud%20NGFW%20by%20Palo%20Alto%20Networks/Data%20Connectors/CloudNgfwByPAN.json","true" -"AzureDiagnostics","Azure Cognitive Search","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cognitive%20Search","azuresentinel","azure-sentinel-solution-azurecognitivesearch","2022-06-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureCognitiveSearch_CCP","Microsoft","Azure Cognitive Search","Azure Cognitive Search is a cloud search service that gives developers infrastructure, APIs, and tools for building a rich search experience over private, heterogeneous content in web, mobile, and enterprise applications. This connector lets you stream your Azure Cognitive Search diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. 
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cognitive%20Search/Data%20Connectors/AzureCognitiveSearch_CCP.JSON","true" -"AzureDiagnostics","Azure DDoS Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20DDoS%20Protection","azuresentinel","azure-sentinel-solution-azureddosprotection","2022-05-13","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","DDOS","Microsoft","Azure DDoS Protection","Connect to Azure DDoS Protection Standard logs via Public IP Address Diagnostic Logs. In addition to the core DDoS protection in the platform, Azure DDoS Protection Standard provides advanced DDoS mitigation capabilities against network attacks. It's automatically tuned to protect your specific Azure resources. Protection is simple to enable during the creation of new virtual networks. It can also be done after creation and requires no application or resource changes. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219760&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20DDoS%20Protection/Data%20Connectors/DDOS.JSON","true" -"AzureDiagnostics","Azure Data Lake Storage Gen1","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Data%20Lake%20Storage%20Gen1","azuresentinel","azure-sentinel-solution-datalakestoragegen1","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureDataLakeStorageGen1_CCP","Microsoft","Azure Data Lake Storage Gen1","Azure Data Lake Storage Gen1 is an enterprise-wide hyper-scale repository for big data analytic workloads. Azure Data Lake enables you to capture data of any size, type, and ingestion speed in one single place for operational and exploratory analytics. 
This connector lets you stream your Azure Data Lake Storage Gen1 diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223812&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Data%20Lake%20Storage%20Gen1/Data%20Connectors/AzureDataLakeStorageGen1_CCP.JSON","true" -"AzureDiagnostics","Azure Event Hubs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Event%20Hubs","azuresentinel","azure-sentinel-solution-eventhub","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureEventHub_CCP","Microsoft","Azure Event Hub","Azure Event Hubs is a big data streaming platform and event ingestion service. It can receive and process millions of events per second. This connector lets you stream your Azure Event Hub diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Event%20Hubs/Data%20Connectors/AzureEventHub_CCP.JSON","true" -"AZFWApplicationRule","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWDnsQuery","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWFatFlow","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWFlowTrace","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWIdpsSignature","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWInternalFqdnResolutionFailure","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWNatRule","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWNetworkRule","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWThreatIntel","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AzureDiagnostics","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AzureDiagnostics","Azure Key Vault","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Key%20Vault","azuresentinel","azure-sentinel-solution-azurekeyvault","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureKeyVault","Microsoft","Azure Key Vault","Azure Key Vault is a cloud service for securely storing and accessing secrets. A secret is anything that you want to tightly control access to, such as API keys, passwords, certificates, or cryptographic keys. This connector lets you stream your Azure Key Vault diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220125&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Key%20Vault/Data%20Connectors/AzureKeyVault.JSON","true" -"AzureDiagnostics","Azure Logic Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Logic%20Apps","azuresentinel","azure-sentinel-solution-logicapps","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureLogicApps_CCP","Microsoft","Azure Logic Apps","Azure Logic Apps is a cloud-based platform for creating and running automated workflows that integrate your apps, data, services, and systems. This connector lets you stream your Azure Logic Apps diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Logic%20Apps/Data%20Connectors/AzureLogicApps_CCP.JSON","true" -"AzureDiagnostics","Azure Network Security Groups","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Network%20Security%20Groups","azuresentinel","azure-sentinel-solution-networksecuritygroup","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureNSG","Microsoft","Network Security Groups","Azure network security groups (NSG) allow you to filter network traffic to and from Azure resources in an Azure virtual network. A network security group includes rules that allow or deny traffic to a virtual network subnet, network interface, or both.

When you enable logging for an NSG, you can gather the following types of resource log information:

- **Event:** Entries are logged for which NSG rules are applied to VMs, based on MAC address.
- **Rule counter:** Contains entries for how many times each NSG rule is applied to deny or allow traffic. The status for these rules is collected every 300 seconds.


This connector lets you stream your NSG diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223718&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Network%20Security%20Groups/Data%20Connectors/AzureNSG.JSON","true" -"AzureDiagnostics","Azure SQL Database solution for sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20SQL%20Database%20solution%20for%20sentinel","sentinel4sql","sentinel4sql","2022-08-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureSql","Microsoft","Azure SQL Databases","Azure SQL is a fully managed, Platform-as-a-Service (PaaS) database engine that handles most database management functions, such as upgrading, patching, backups, and monitoring, without necessitating user involvement. This connector lets you stream your Azure SQL databases audit and diagnostic logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20SQL%20Database%20solution%20for%20sentinel/Data%20Connectors/template_AzureSql.JSON","true" -"AzureDiagnostics","Azure Service Bus","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Service%20Bus","azuresentinel","azure-sentinel-solution-servicebus","2022-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureServiceBus_CCP","Microsoft","Azure Service Bus","Azure Service Bus is a fully managed enterprise message broker with message queues and publish-subscribe topics (in a namespace). This connector lets you stream your Azure Service Bus diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. 
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Service%20Bus/Data%20Connectors/AzureServiceBus_CCP.JSON","true" -"AzureMetrics","Azure Storage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Storage","azuresentinel","azure-sentinel-solution-azurestorageaccount","2022-05-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureStorageAccount","Microsoft","Azure Storage Account","Azure Storage account is a cloud solution for modern data storage scenarios. It contains all your data objects: blobs, files, queues, tables, and disks. This connector lets you stream Azure Storage accounts diagnostics logs into your Microsoft Sentinel workspace, allowing you to continuously monitor activity in all your instances, and detect malicious activity in your organization. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220068&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Storage/Data%20Connectors/AzureStorageAccount_CCP.JSON","true" -"StorageBlobLogs","Azure Storage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Storage","azuresentinel","azure-sentinel-solution-azurestorageaccount","2022-05-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureStorageAccount","Microsoft","Azure Storage Account","Azure Storage account is a cloud solution for modern data storage scenarios. It contains all your data objects: blobs, files, queues, tables, and disks. This connector lets you stream Azure Storage accounts diagnostics logs into your Microsoft Sentinel workspace, allowing you to continuously monitor activity in all your instances, and detect malicious activity in your organization. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220068&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Storage/Data%20Connectors/AzureStorageAccount_CCP.JSON","true" -"StorageFileLogs","Azure Storage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Storage","azuresentinel","azure-sentinel-solution-azurestorageaccount","2022-05-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureStorageAccount","Microsoft","Azure Storage Account","Azure Storage account is a cloud solution for modern data storage scenarios. It contains all your data objects: blobs, files, queues, tables, and disks. This connector lets you stream Azure Storage accounts diagnostics logs into your Microsoft Sentinel workspace, allowing you to continuously monitor activity in all your instances, and detect malicious activity in your organization. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220068&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Storage/Data%20Connectors/AzureStorageAccount_CCP.JSON","true" -"StorageQueueLogs","Azure Storage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Storage","azuresentinel","azure-sentinel-solution-azurestorageaccount","2022-05-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureStorageAccount","Microsoft","Azure Storage Account","Azure Storage account is a cloud solution for modern data storage scenarios. It contains all your data objects: blobs, files, queues, tables, and disks. 
This connector lets you stream Azure Storage accounts diagnostics logs into your Microsoft Sentinel workspace, allowing you to continuously monitor activity in all your instances, and detect malicious activity in your organization. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220068&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Storage/Data%20Connectors/AzureStorageAccount_CCP.JSON","true" -"StorageTableLogs","Azure Storage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Storage","azuresentinel","azure-sentinel-solution-azurestorageaccount","2022-05-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureStorageAccount","Microsoft","Azure Storage Account","Azure Storage account is a cloud solution for modern data storage scenarios. It contains all your data objects: blobs, files, queues, tables, and disks. This connector lets you stream Azure Storage accounts diagnostics logs into your Microsoft Sentinel workspace, allowing you to continuously monitor activity in all your instances, and detect malicious activity in your organization. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220068&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Storage/Data%20Connectors/AzureStorageAccount_CCP.JSON","true" -"AzureDiagnostics","Azure Stream Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Stream%20Analytics","azuresentinel","azure-sentinel-solution-streamanalytics","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureStreamAnalytics_CCP","Microsoft","Azure Stream Analytics","Azure Stream Analytics is a real-time analytics and complex event-processing engine that is designed to analyze and process high volumes of fast streaming data from multiple sources simultaneously. This connector lets you stream your Azure Stream Analytics hub diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Stream%20Analytics/Data%20Connectors/AzureStreamAnalytics_CCP.JSON","true" -"AzureDiagnostics","Azure Web Application Firewall (WAF)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Web%20Application%20Firewall%20%28WAF%29","azuresentinel","azure-sentinel-solution-azurewebapplicationfirewal","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","WAF","Microsoft","Azure Web Application Firewall (WAF)","Connect to the Azure Web Application Firewall (WAF) for Application Gateway, Front Door, or CDN. This WAF protects your applications from common web vulnerabilities such as SQL injection and cross-site scripting, and lets you customize rules to reduce false positives. Follow these instructions to stream your Microsoft Web application firewall logs into Microsoft Sentinel. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223546&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Web%20Application%20Firewall%20%28WAF%29/Data%20Connectors/template_WAF.JSON","true" -"AzureDiagnostics","Azure kubernetes Service","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20kubernetes%20Service","azuresentinel","azure-sentinel-solution-azurekubernetes","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureKubernetes","Microsoft","Azure Kubernetes Service (AKS)","Azure Kubernetes Service (AKS) is an open-source, fully-managed container orchestration service that allows you to deploy, scale, and manage Docker containers and container-based applications in a cluster environment. This connector lets you stream your Azure Kubernetes Service (AKS) diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219762&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20kubernetes%20Service/Data%20Connectors/AzureKubernetes.JSON","true" -"ContainerInventory","Azure kubernetes Service","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20kubernetes%20Service","azuresentinel","azure-sentinel-solution-azurekubernetes","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureKubernetes","Microsoft","Azure Kubernetes Service (AKS)","Azure Kubernetes Service (AKS) is an open-source, fully-managed container orchestration service that allows you to deploy, scale, and manage Docker containers and container-based applications in a cluster environment. 
This connector lets you stream your Azure Kubernetes Service (AKS) diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219762&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20kubernetes%20Service/Data%20Connectors/AzureKubernetes.JSON","true" -"KubeEvents","Azure kubernetes Service","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20kubernetes%20Service","azuresentinel","azure-sentinel-solution-azurekubernetes","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureKubernetes","Microsoft","Azure Kubernetes Service (AKS)","Azure Kubernetes Service (AKS) is an open-source, fully-managed container orchestration service that allows you to deploy, scale, and manage Docker containers and container-based applications in a cluster environment. This connector lets you stream your Azure Kubernetes Service (AKS) diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219762&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20kubernetes%20Service/Data%20Connectors/AzureKubernetes.JSON","true" -"ADOAuditLogs_CL","AzureDevOpsAuditing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AzureDevOpsAuditing","azuresentinel","azure-sentinel-solution-azuredevopsauditing","2022-09-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureDevOpsAuditLogs","Microsoft","Azure DevOps Audit Logs (via Codeless Connector Platform)","The Azure DevOps Audit Logs data connector allows you to ingest audit events from Azure DevOps into Microsoft Sentinel. This data connector is built using the Microsoft Sentinel Codeless Connector Platform, ensuring seamless integration. It leverages the Azure DevOps Audit Logs API to fetch detailed audit events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview). These transformations enable parsing of the received audit data into a custom table during ingestion, improving query performance by eliminating the need for additional parsing. 
By using this connector, you can gain enhanced visibility into your Azure DevOps environment and streamline your security operations.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AzureDevOpsAuditing/Data%20Connectors/AzureDevOpsAuditLogs_CCP/AzureDevOpsAuditLogs_DataConnectorDefinition.json","true" -"","AzureSecurityBenchmark","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AzureSecurityBenchmark","azuresentinel","azure-sentinel-solution-azuresecuritybenchmark","2022-06-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"BetterMTDAppLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. 
This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" -"BetterMTDDeviceLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" -"BetterMTDIncidentLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. 
This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" -"BetterMTDNetflowLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" -"Syslog","Barracuda CloudGen Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20CloudGen%20Firewall","microsoftsentinelcommunity","azure-sentinel-solution-barracudacloudgenfirewall","2021-05-02","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","BarracudaCloudFirewall","Barracuda","[Deprecated] Barracuda CloudGen Firewall","The Barracuda CloudGen Firewall (CGFW) connector allows you to easily connect your Barracuda CGFW logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20CloudGen%20Firewall/Data%20Connectors/template_BarracudaCloudFirewall.json","true" -"Barracuda_CL","Barracuda WAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF","barracudanetworks","barracuda_web_application_firewall_mss","2022-05-13","","","Barracuda","Partner","https://www.barracuda.com/support","","domains","Barracuda","Barracuda","[Deprecated] Barracuda Web Application Firewall via Legacy Agent","The Barracuda Web Application Firewall (WAF) connector allows you to easily connect your Barracuda logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.

[For more information >​](https://aka.ms/CEF-Barracuda)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF/Data%20Connectors/template_Barracuda.json","true" -"CommonSecurityLog","Barracuda WAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF","barracudanetworks","barracuda_web_application_firewall_mss","2022-05-13","","","Barracuda","Partner","https://www.barracuda.com/support","","domains","Barracuda","Barracuda","[Deprecated] Barracuda Web Application Firewall via Legacy Agent","The Barracuda Web Application Firewall (WAF) connector allows you to easily connect your Barracuda logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.

[For more information >​](https://aka.ms/CEF-Barracuda)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF/Data%20Connectors/template_Barracuda.json","true" -"barracuda_CL","Barracuda WAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF","barracudanetworks","barracuda_web_application_firewall_mss","2022-05-13","","","Barracuda","Partner","https://www.barracuda.com/support","","domains","Barracuda","Barracuda","[Deprecated] Barracuda Web Application Firewall via Legacy Agent","The Barracuda Web Application Firewall (WAF) connector allows you to easily connect your Barracuda logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.

[For more information >​](https://aka.ms/CEF-Barracuda)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF/Data%20Connectors/template_Barracuda.json","true" -"beSECURE_Audit_CL","Beyond Security beSECURE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE","azuresentinel","azure-sentinel-solution-isvtesting12","2022-05-02","","","Beyond Security","Partner","https://beyondsecurity.freshdesk.com/support/home","","domains","BeyondSecuritybeSECURE","Beyond Security","Beyond Security beSECURE","The [Beyond Security beSECURE](https://beyondsecurity.com/) connector allows you to easily connect your Beyond Security beSECURE scan events, scan results and audit trail with Azure Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE/Data%20Connectors/Beyond%20Security%20beSECURE.json","true" -"beSECURE_ScanEvent_CL","Beyond Security beSECURE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE","azuresentinel","azure-sentinel-solution-isvtesting12","2022-05-02","","","Beyond Security","Partner","https://beyondsecurity.freshdesk.com/support/home","","domains","BeyondSecuritybeSECURE","Beyond Security","Beyond Security beSECURE","The [Beyond Security beSECURE](https://beyondsecurity.com/) connector allows you to easily connect your Beyond Security beSECURE scan events, scan results and audit trail with Azure Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE/Data%20Connectors/Beyond%20Security%20beSECURE.json","true" -"beSECURE_ScanResults_CL","Beyond Security beSECURE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE","azuresentinel","azure-sentinel-solution-isvtesting12","2022-05-02","","","Beyond Security","Partner","https://beyondsecurity.freshdesk.com/support/home","","domains","BeyondSecuritybeSECURE","Beyond Security","Beyond Security beSECURE","The [Beyond Security beSECURE](https://beyondsecurity.com/) connector allows you to easily connect your Beyond Security beSECURE scan events, scan results and audit trail with Azure Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE/Data%20Connectors/Beyond%20Security%20beSECURE.json","true" -"BigIDDSPMCatalog_CL","BigID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BigID","bigid","azure-sentinel-solution-bigid","2025-10-07","","","BigID","Partner","https://www.bigid.com/support","","domains","BigIDDSPMLogsConnectorDefinition","BigID","BigID DSPM connector","The [BigID DSPM](https://bigid.com/data-security-posture-management/) data connector provides the capability to ingest BigID DSPM cases with affected objects and datasource information into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BigID/Data%20Connectors/BigIDDSPMLogs_ccp/BigIDDSPMLogs_connectorDefinition.json","true" 
-"BitsightAlerts_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightBreaches_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightCompany_details_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" 
-"BitsightCompany_rating_details_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightDiligence_historical_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightDiligence_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft 
Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightFindings_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightFindings_summary_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightGraph_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft 
Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightIndustrial_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightObservation_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitglassLogs_CL","Bitglass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitglass","azuresentinel","azure-sentinel-solution-bitglass","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Bitglass","Bitglass","Bitglass","The [Bitglass](https://www.bitglass.com/) data connector provides the capability to retrieve security event logs of the Bitglass services and more events into Microsoft Sentinel through the REST API. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitglass/Data%20Connectors/Bitglass_API_FunctionApp.json","true" -"BitwardenEventLogs_CL","Bitwarden","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden","8bit-solutions-llc","bitwarden-sentinel-integration","2024-05-12","2024-10-02","","Bitwarden Inc","Partner","https://bitwarden.com","","domains","BitwardenEventLogs","Bitwarden Inc","Bitwarden Event Logs","This connector provides insight into activity of your Bitwarden organization such as user's activity (logged in, changed password, 2fa, etc.), cipher activity (created, updated, deleted, shared, etc.), collection activity, organization activity, and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden/Data%20Connectors/BitwardenEventLogs/definitions.json","true" -"BitwardenGroups_CL","Bitwarden","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden","8bit-solutions-llc","bitwarden-sentinel-integration","2024-05-12","2024-10-02","","Bitwarden Inc","Partner","https://bitwarden.com","","domains","BitwardenEventLogs","Bitwarden Inc","Bitwarden Event Logs","This connector provides insight into activity of your Bitwarden organization such as user's activity (logged in, changed password, 2fa, etc.), cipher activity (created, updated, deleted, shared, etc.), collection activity, organization activity, and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden/Data%20Connectors/BitwardenEventLogs/definitions.json","true" -"BitwardenMembers_CL","Bitwarden","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden","8bit-solutions-llc","bitwarden-sentinel-integration","2024-05-12","2024-10-02","","Bitwarden 
Inc","Partner","https://bitwarden.com","","domains","BitwardenEventLogs","Bitwarden Inc","Bitwarden Event Logs","This connector provides insight into activity of your Bitwarden organization such as user's activity (logged in, changed password, 2fa, etc.), cipher activity (created, updated, deleted, shared, etc.), collection activity, organization activity, and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden/Data%20Connectors/BitwardenEventLogs/definitions.json","true" -"Syslog","Blackberry CylancePROTECT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Blackberry%20CylancePROTECT","azuresentinel","azure-sentinel-solution-blackberrycylanceprotect","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BlackberryCylancePROTECT","Blackberry","[Deprecated] Blackberry CylancePROTECT","The [Blackberry CylancePROTECT](https://www.blackberry.com/us/en/products/blackberry-protect) connector allows you to easily connect your CylancePROTECT logs with Microsoft Sentinel. 
This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Blackberry%20CylancePROTECT/Data%20Connectors/template_BlackberryCylancePROTECT.JSON","true" -"","BlinkOps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BlinkOps","blinkoperations1709924858838","azure-sentinel-blink_automation","2025-05-05","","","Blink Support","Partner","https://support.blinkops.com","","domains","","","","","","false" -"BHEAttackPathsData_CL","BloodHound Enterprise","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BloodHound%20Enterprise","azurehoundenterprise","bloodhoundenterprise-azuresentinel","2023-05-04","2021-05-04","","SpecterOps","Partner","https://bloodhoundenterprise.io/","","domains","BloodHoundEnterprise","SpecterOps","Bloodhound Enterprise","The solution is designed to test Bloodhound Enterprise package creation process.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BloodHound%20Enterprise/Data%20Connectors/BloodHoundFunction.json","true" -"BoxEvents_CL","Box","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box","azuresentinel","azure-sentinel-solution-box","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BoxDataConnector","Box","Box","The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Microsoft Sentinel using the Box REST API. 
Refer to [Box documentation](https://developer.box.com/guides/events/enterprise-events/for-enterprise/) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box/Data%20Connectors/Box_API_FunctionApp.json","true" -"BoxEventsV2_CL","Box","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box","azuresentinel","azure-sentinel-solution-box","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BoxEventsCCPDefinition","Microsoft","Box Events (CCP)","The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Microsoft Sentinel using the Box REST API. Refer to [Box documentation](https://developer.box.com/guides/events/enterprise-events/for-enterprise/) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box/Data%20Connectors/BoxEvents_ccp/BoxEvents_DataConnectorDefinition.json","true" -"BoxEvents_CL","Box","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box","azuresentinel","azure-sentinel-solution-box","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BoxEventsCCPDefinition","Microsoft","Box Events (CCP)","The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Microsoft Sentinel using the Box REST API. 
Refer to [Box documentation](https://developer.box.com/guides/events/enterprise-events/for-enterprise/) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box/Data%20Connectors/BoxEvents_ccp/BoxEvents_DataConnectorDefinition.json","true" -"CommonSecurityLog","Broadcom SymantecDLP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP","azuresentinel","azure-sentinel-solution-broadcomsymantecdlp","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BroadcomSymantecDLP","Broadcom","[Deprecated] Broadcom Symantec DLP via Legacy Agent","The [Broadcom Symantec Data Loss Prevention (DLP)](https://www.broadcom.com/products/cyber-security/information-protection/data-loss-prevention) connector allows you to easily connect your Symantec DLP with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s information, where it travels, and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP/Data%20Connectors/Connector_Syslog_SymantecDLP.json","true" -"CommonSecurityLog","Broadcom SymantecDLP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP","azuresentinel","azure-sentinel-solution-broadcomsymantecdlp","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BroadcomSymantecDLPAma","Broadcom","[Deprecated] Broadcom Symantec DLP via AMA","The [Broadcom Symantec Data Loss Prevention (DLP)](https://www.broadcom.com/products/cyber-security/information-protection/data-loss-prevention) connector allows you to easily connect your Symantec DLP with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. 
This gives you more insight into your organization’s information, where it travels, and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP/Data%20Connectors/template_SymantecDLPAMA.json","true" -"","Business Email Compromise - Financial Fraud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Business%20Email%20Compromise%20-%20Financial%20Fraud","azuresentinel","azure-sentinel-solution-bec_financialfraud","2023-08-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"Syslog","CTERA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTERA","cteranetworksltd1651947437632","ctera-microsoft-sentinel","2024-07-28","","","CTERA","Partner","https://www.ctera.com/","","domains","CTERA","CTERA Networks Ltd","CTERA Syslog","The CTERA Data Connector for Microsoft Sentinel offers monitoring and threat detection capabilities for your CTERA solution.
It includes a workbook visualizing the sum of all operations per type, deletions, and denied access operations.
It also provides analytic rules which detects ransomware incidents and alert you when a user is blocked due to suspicious ransomware activity.
Additionally, it helps you identify critical patterns such as mass access denied events, mass deletions, and mass permission changes, enabling proactive threat management and response.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTERA/Data%20Connectors/CTERA_Data_Connector.json","true" -"CBSLog_Azure_1_CL","CTM360","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360","ctm360wll1698919697848","ctm360_microsoft_sentinel_solution","2023-10-23","","","Cyber Threat Management 360","Partner","https://www.ctm360.com/","","domains","CBSPollingIDAzureFunctions","CTM360","Cyber Blind Spot Integration","Through the API integration, you have the capability to retrieve all the issues related to your CBS organizations via a RESTful interface.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360/Data%20Connectors/CBS/CTM360_CBS_API_functionApp.json","true" -"HackerViewLog_Azure_1_CL","CTM360","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360","ctm360wll1698919697848","ctm360_microsoft_sentinel_solution","2023-10-23","","","Cyber Threat Management 360","Partner","https://www.ctm360.com/","","domains","HVPollingIDAzureFunctions","CTM360","HackerView Intergration","Through the API integration, you have the capability to retrieve all the issues related to your HackerView organizations via a RESTful interface.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360/Data%20Connectors/HackerView/CTM360_HV_API_FunctionApp.json","true" -"","Check Point","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point","checkpoint","checkpoint-sentinel-solutions","2021-08-13","","","Check Point","Partner","https://www.checkpoint.com/support-services/contact-support/","","domains","","","","","","false" -"CloudGuard_SecurityEvents_CL","Check Point CloudGuard 
CNAPP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20CloudGuard%20CNAPP","checkpoint","checkpoint-sentinel-solutions-cloud-guard","2024-11-12","","","Check Point","Partner","https://www.checkpoint.com/support-services/contact-support/","","domains","CloudGuardCCPDefinition","CheckPoint","Check Point CloudGuard CNAPP Connector for Microsoft Sentinel","The [CloudGuard](https://sc1.checkpoint.com/documents/CloudGuard_Dome9/Documentation/Overview/CloudGuard-CSPM-Introduction.htm?cshid=help_center_documentation) data connector enables the ingestion of security events from the CloudGuard API into Microsoft Sentinel™, using Microsoft Sentinel’s Codeless Connector Platform. The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) which parses incoming security event data into custom columns. This pre-parsing process eliminates the need for query-time parsing, resulting in improved performance for data queries.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20CloudGuard%20CNAPP/Data%20Connectors/CloudGuard_ccp/CloudGuard_DataConnectorDefinition.json","true" -"argsentdc_CL","Check Point Cyberint Alerts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20Alerts","checkpoint","checkpoint-cyberint-solutions-alerts","2025-03-18","","","Cyberint","Partner","https://cyberint.com/customer-support/","","domains","CheckPointCyberintAlerts","Checkpoint Cyberint","Check Point Cyberint Alerts Connector (via Codeless Connector Platform)","Cyberint, a Check Point company, provides a Microsoft Sentinel integration to streamline critical Alerts and bring enriched threat intelligence from the Infinity External Risk Management solution into Microsoft Sentinel. This simplifies the process of tracking the status of tickets with automatic sync updates across systems. 
Using this new integration for Microsoft Sentinel, existing Cyberint and Microsoft Sentinel customers can easily pull logs based on Cyberint's findings into Microsoft Sentinel platform.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20Alerts/Data%20Connectors/CyberintArgosAlertsLogs_ccp/CyberintArgosAlertsLogs_connectorDefinition.json","true" -"iocsent_CL","Check Point Cyberint IOC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20IOC","checkpoint","azure-sentinel-checkpoint-cyberint-ioc","2025-04-29","","","Cyberint","Partner","https://cyberint.com/customer-support/","","domains","CheckPointCyberintIOC","Checkpoint Cyberint","Check Point Cyberint IOC Connector","This is data connector for Check Point Cyberint IOC.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20IOC/Data%20Connectors/CyberintArgosIOCLogs_ccp/CyberintArgosIOCLogs_connectorDefinition.json","true" -"","CheckPhish by Bolster","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CheckPhish%20by%20Bolster","azuresentinel","azure-sentinel-solution-checkphishbybolster","2022-10-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"Syslog","Cisco ACI","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ACI","azuresentinel","azure-sentinel-solution-ciscoaci","2021-07-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoACI","Cisco","[Deprecated] Cisco Application Centric Infrastructure","[Cisco Application Centric Infrastructure (ACI)](https://www.cisco.com/c/en/us/solutions/collateral/data-center-virtualization/application-centric-infrastructure/solution-overview-c22-741487.html) data connector provides the capability to ingest [Cisco ACI 
logs](https://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/all/syslog/guide/b_ACI_System_Messages_Guide/m-aci-system-messages-reference.html) into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ACI/Data%20Connectors/CiscoACI_Syslog.json","true" -"CiscoETD_CL","Cisco ETD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ETD","cisco","cisco-etd-sentinel","2024-03-04","","","Cisco Systems","Partner","","","domains","CiscoETD","Cisco","Cisco ETD","The connector fetches data from ETD api for threat analysis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ETD/Data%20Connectors/CiscoETD_API_FunctionApp.json","true" -"CommonSecurityLog","Cisco Firepower EStreamer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer","cisco","cisco-firepower-estreamer","2022-05-25","","","Cisco","Partner","https://www.cisco.com/c/en_in/support/index.html","","domains","CiscoFirepowerEStreamer","Cisco","[Deprecated] Cisco Firepower eStreamer via Legacy Agent","eStreamer is a Client Server API designed for the Cisco Firepower NGFW Solution. The eStreamer client requests detailed event data on behalf of the SIEM or logging solution in the Common Event Format (CEF).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer/Data%20Connectors/CiscoFirepowerEStreamerCollector.json","true" -"CommonSecurityLog","Cisco Firepower EStreamer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer","cisco","cisco-firepower-estreamer","2022-05-25","","","Cisco","Partner","https://www.cisco.com/c/en_in/support/index.html","","domains","CiscoFirepowerEStreamerAma","Cisco","[Deprecated] Cisco Firepower eStreamer via AMA","eStreamer is a Client Server API designed for the Cisco Firepower NGFW Solution. 
The eStreamer client requests detailed event data on behalf of the SIEM or logging solution in the Common Event Format (CEF).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer/Data%20Connectors/template_CiscoFirepowerEStreamerAMA.json","true" -"Syslog","Cisco ISE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ISE","azuresentinel","azure-sentinel-solution-ciscoise","2021-07-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoISE","Cisco","[Deprecated] Cisco Identity Services Engine","The Cisco Identity Services Engine (ISE) data connector provides the capability to ingest [Cisco ISE](https://www.cisco.com/c/en/us/products/security/identity-services-engine/index.html) events into Microsoft Sentinel. It helps you gain visibility into what is happening in your network, such as who is connected, which applications are installed and running, and much more. Refer to [Cisco ISE logging mechanism documentation](https://www.cisco.com/c/en/us/td/docs/security/ise/2-7/admin_guide/b_ise_27_admin_guide/b_ISE_admin_27_maintain_monitor.html#reference_BAFBA5FA046A45938810A5DF04C00591) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ISE/Data%20Connectors/Connector_Cisco_ISE.json","true" -"ASimAuditEventLogs","Cisco Meraki Events via REST API","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API","azuresentinel","azure-sentinel-solution-ciscomerakinativepoller","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiMultiRule","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki organization events (Security events, Configuration Changes and API Requests) to Microsoft Sentinel. 
The data connector uses the [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch logs and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received data and ingests into ASIM and custom tables in your Log Analytics workspace. This data connector benefits from capabilities such as DCR based ingestion-time filtering, data normalization.

**Supported ASIM schema:**
1. Network Session
2. Web Session
3. Audit Event","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API/Data%20Connectors/CiscoMerakiMultiRule_ccp/dataConnectorDefinition.json","true" -"ASimNetworkSessionLogs","Cisco Meraki Events via REST API","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API","azuresentinel","azure-sentinel-solution-ciscomerakinativepoller","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiMultiRule","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki organization events (Security events, Configuration Changes and API Requests) to Microsoft Sentinel. The data connector uses the [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch logs and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received data and ingests into ASIM and custom tables in your Log Analytics workspace. This data connector benefits from capabilities such as DCR based ingestion-time filtering, data normalization.

**Supported ASIM schema:**
1. Network Session
2. Web Session
3. Audit Event","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API/Data%20Connectors/CiscoMerakiMultiRule_ccp/dataConnectorDefinition.json","true" -"ASimWebSessionLogs","Cisco Meraki Events via REST API","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API","azuresentinel","azure-sentinel-solution-ciscomerakinativepoller","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiMultiRule","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki organization events (Security events, Configuration Changes and API Requests) to Microsoft Sentinel. The data connector uses the [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch logs and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received data and ingests into ASIM and custom tables in your Log Analytics workspace. This data connector benefits from capabilities such as DCR based ingestion-time filtering, data normalization.

**Supported ASIM schema:**
1. Network Session
2. Web Session
3. Audit Event","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API/Data%20Connectors/CiscoMerakiMultiRule_ccp/dataConnectorDefinition.json","true" -"CiscoSDWANNetflow_CL","Cisco SD-WAN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN","cisco","cisco-catalyst-sdwan-sentinel","2023-06-01","2024-06-01","","Cisco Systems","Partner","https://globalcontacts.cloudapps.cisco.com/contacts/contactDetails/en_US/c1o1-c2o2-c3o8","","domains","CiscoSDWAN","Cisco","Cisco Software Defined WAN","The Cisco Software Defined WAN(SD-WAN) data connector provides the capability to ingest [Cisco SD-WAN](https://www.cisco.com/c/en_in/solutions/enterprise-networks/sd-wan/index.html) Syslog and Netflow data into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/CiscoSDWAN.json","true" -"Syslog","Cisco SD-WAN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN","cisco","cisco-catalyst-sdwan-sentinel","2023-06-01","2024-06-01","","Cisco Systems","Partner","https://globalcontacts.cloudapps.cisco.com/contacts/contactDetails/en_US/c1o1-c2o2-c3o8","","domains","CiscoSDWAN","Cisco","Cisco Software Defined WAN","The Cisco Software Defined WAN(SD-WAN) data connector provides the capability to ingest [Cisco SD-WAN](https://www.cisco.com/c/en_in/solutions/enterprise-networks/sd-wan/index.html) Syslog and Netflow data into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/CiscoSDWAN.json","true" -"Syslog","Cisco Secure Cloud Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Cloud%20Analytics","azuresentinel","azure-sentinel-solution-ciscostealthwatch","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Stealthwatch","Cisco","[Deprecated] Cisco Secure Cloud Analytics","The 
[Cisco Secure Cloud Analytics](https://www.cisco.com/c/en/us/products/security/stealthwatch/index.html) data connector provides the capability to ingest [Cisco Secure Cloud Analytics events](https://www.cisco.com/c/dam/en/us/td/docs/security/stealthwatch/management_console/securit_events_alarm_categories/7_4_2_Security_Events_and_Alarm_Categories_DV_2_1.pdf) into Microsoft Sentinel. Refer to [Cisco Secure Cloud Analytics documentation](https://www.cisco.com/c/dam/en/us/td/docs/security/stealthwatch/system_installation_configuration/7_5_0_System_Configuration_Guide_DV_1_3.pdf) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Cloud%20Analytics/Data%20Connectors/Cisco_Stealthwatch_syslog.json","true" -"CiscoSecureEndpoint_CL","Cisco Secure Endpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint","azuresentinel","azure-sentinel-solution-ciscosecureendpoint","2021-10-28","2022-02-02","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSecureEndpoint","Cisco","[DEPRECATED] Cisco Secure Endpoint (AMP)","The Cisco Secure Endpoint (formerly AMP for Endpoints) data connector provides the capability to ingest Cisco Secure Endpoint [audit logs](https://api-docs.amp.cisco.com/api_resources/AuditLog?api_host=api.amp.cisco.com&api_version=v1) and [events](https://api-docs.amp.cisco.com/api_actions/details?api_action=GET+%2Fv1%2Fevents&api_host=api.amp.cisco.com&api_resource=Event&api_version=v1) into Microsoft Sentinel.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/CiscoSecureEndpoint_API_FunctionApp.json","true" -"CiscoSecureEndpointAuditLogsV2_CL","Cisco Secure Endpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint","azuresentinel","azure-sentinel-solution-ciscosecureendpoint","2021-10-28","2022-02-02","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSecureEndpointLogsCCPDefinition","Microsoft","Cisco Secure Endpoint (via Codeless Connector Framework)","The Cisco Secure Endpoint (formerly AMP for Endpoints) data connector provides the capability to ingest Cisco Secure Endpoint [audit logs](https://developer.cisco.com/docs/secure-endpoint/auditlog/) and [events](https://developer.cisco.com/docs/secure-endpoint/v1-api-reference-event/) into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/CiscoSecureEndpointLogs_ccp/CiscoSecureEndpointLogs_ConnectorDefinition.json","true" -"CiscoSecureEndpointEventsV2_CL","Cisco Secure Endpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint","azuresentinel","azure-sentinel-solution-ciscosecureendpoint","2021-10-28","2022-02-02","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSecureEndpointLogsCCPDefinition","Microsoft","Cisco Secure Endpoint (via Codeless Connector Framework)","The Cisco Secure Endpoint (formerly AMP for Endpoints) data connector provides the capability to ingest Cisco Secure Endpoint [audit logs](https://developer.cisco.com/docs/secure-endpoint/auditlog/) and [events](https://developer.cisco.com/docs/secure-endpoint/v1-api-reference-event/) into Microsoft 
Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/CiscoSecureEndpointLogs_ccp/CiscoSecureEndpointLogs_ConnectorDefinition.json","true" -"Syslog","Cisco UCS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20UCS","azuresentinel","azure-sentinel-solution-ciscoucs","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoUCS","Cisco","[Deprecated] Cisco UCS","The [Cisco Unified Computing System (UCS)](https://www.cisco.com/c/en/us/products/servers-unified-computing/index.html) connector allows you to easily connect your Cisco UCS logs with Microsoft Sentinel This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20UCS/Data%20Connectors/Connector_Syslog_CiscoUCS.json","true" -"CommonSecurityLog","CiscoASA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoASA","azuresentinel","azure-sentinel-solution-ciscoasa","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoASA","Cisco","Cisco ASA via Legacy Agent","The Cisco ASA firewall connector allows you to easily connect your Cisco ASA logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoASA/Data%20Connectors/CiscoASA.JSON","true" -"CommonSecurityLog","CiscoASA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoASA","azuresentinel","azure-sentinel-solution-ciscoasa","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoAsaAma","Microsoft","Cisco ASA/FTD via AMA","The Cisco ASA firewall connector allows you to easily connect your Cisco ASA logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoASA/Data%20Connectors/template_CiscoAsaAma.JSON","true" -"CiscoDuo_CL","CiscoDuoSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoDuoSecurity","cisco","duo-security-sentinel","2022-01-07","","","Cisco Systems","Partner","https://duo.com/support","","domains","CiscoDuoSecurity","Cisco","Cisco Duo Security","The Cisco Duo Security data connector provides the capability to ingest [authentication logs](https://duo.com/docs/adminapi#authentication-logs), [administrator logs](https://duo.com/docs/adminapi#administrator-logs), [telephony logs](https://duo.com/docs/adminapi#telephony-logs), [offline enrollment logs](https://duo.com/docs/adminapi#offline-enrollment-logs) and [Trust Monitor events](https://duo.com/docs/adminapi#trust-monitor) into Microsoft Sentinel using the Cisco Duo Admin API. 
Refer to [API documentation](https://duo.com/docs/adminapi) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoDuoSecurity/Data%20Connectors/CiscoDuo_API_FunctionApp.json","true" -"meraki_CL","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMeraki","Cisco","[Deprecated] Cisco Meraki","The [Cisco Meraki](https://meraki.cisco.com/) connector allows you to easily connect your Cisco Meraki (MX/MR/MS) logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Data%20Connectors/Connector_Syslog_CiscoMeraki.json","true" -"CiscoMerakiNativePoller_CL","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiNativePoller","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki MX [security events](https://aka.ms/ciscomerakisecurityevents) to Microsoft Sentinel. The data connector uses [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.

**Supported ASIM schema:**
1. Network Session","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Data%20Connectors/CiscoMerakiNativePollerConnector/azuredeploy_Cisco_Meraki_native_poller_connector.json","true" -"meraki_CL","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiNativePoller","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki MX [security events](https://aka.ms/ciscomerakisecurityevents) to Microsoft Sentinel. The data connector uses [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.

**Supported ASIM schema:**
1. Network Session","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Data%20Connectors/CiscoMerakiNativePollerConnector/azuredeploy_Cisco_Meraki_native_poller_connector.json","true" -"CommonSecurityLog","CiscoSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG","azuresentinel","azure-sentinel-solution-ciscoseg","2021-06-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSEG","Cisco","[Deprecated] Cisco Secure Email Gateway via Legacy Agent","The [Cisco Secure Email Gateway (SEG)](https://www.cisco.com/c/en/us/products/security/email-security/index.html) data connector provides the capability to ingest [Cisco SEG Consolidated Event Logs](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1061902) into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG/Data%20Connectors/Connector_Cisco_SEG_CEF.json","true" -"CommonSecurityLog","CiscoSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG","azuresentinel","azure-sentinel-solution-ciscoseg","2021-06-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSEGAma","Cisco","[Deprecated] Cisco Secure Email Gateway via AMA","The [Cisco Secure Email Gateway (SEG)](https://www.cisco.com/c/en/us/products/security/email-security/index.html) data connector provides the capability to ingest [Cisco SEG Consolidated Event Logs](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1061902) into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG/Data%20Connectors/template_CiscoSEGAMA.json","true" 
-"Cisco_Umbrella_audit_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_cloudfirewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. 
Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_dlp_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. 
Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_dns_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. 
Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_fileevent_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. 
Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_firewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. 
Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_intrusion_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. 
Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_ip_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. 
Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_proxy_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. 
Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_ravpnlogs_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. 
Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_ztaflow_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. 
Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_ztna_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_audit_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. 
Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_cloudfirewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_dlp_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_dns_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_fileevent_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_firewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_intrusion_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_ip_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_proxy_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_ravpnlogs_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_ztaflow_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_ztna_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Syslog","CiscoWSA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoWSA","azuresentinel","azure-sentinel-solution-ciscowsa","2021-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoWSA","Cisco","[Deprecated] Cisco Web Security Appliance","[Cisco Web Security Appliance (WSA)](https://www.cisco.com/c/en/us/products/security/web-security-appliance/index.html) data connector provides the capability to ingest [Cisco WSA Access Logs](https://www.cisco.com/c/en/us/td/docs/security/wsa/wsa_14-0/User-Guide/b_WSA_UserGuide_14_0/b_WSA_UserGuide_11_7_chapter_010101.html) into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoWSA/Data%20Connectors/Connector_WSA_Syslog.json","true" -"Syslog","Citrix ADC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20ADC","azuresentinel","azure-sentinel-solution-citrixadc","2022-06-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CitrixADC","Citrix","[Deprecated] Citrix ADC (former NetScaler)","The [Citrix ADC (former NetScaler)](https://www.citrix.com/products/citrix-adc/) data connector provides the capability to ingest Citrix ADC logs into Microsoft Sentinel. 
If you want to ingest Citrix WAF logs into Microsoft Sentinel, refer this [documentation](https://learn.microsoft.com/azure/sentinel/data-connectors/citrix-waf-web-app-firewall)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20ADC/Data%20Connectors/Connector_CitrixADC_syslog.json","true" -"CitrixAnalytics_indicatorEventDetails_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" -"CitrixAnalytics_indicatorSummary_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. 
You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" -"CitrixAnalytics_riskScoreChange_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" -"CitrixAnalytics_userProfile_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. 
You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" -"CommonSecurityLog","Citrix Web App Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall","citrix","citrix_waf_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","CitrixWAF","Citrix Systems Inc.","[Deprecated] Citrix WAF (Web App Firewall) via Legacy Agent"," Citrix WAF (Web App Firewall) is an industry leading enterprise-grade WAF solution. Citrix WAF mitigates threats against your public-facing assets, including websites, apps, and APIs. From layer 3 to layer 7, Citrix WAF includes protections such as IP reputation, bot mitigation, defense against the OWASP Top 10 application threats, built-in signatures to protect against application stack vulnerabilities, and more.

Citrix WAF supports Common Event Format (CEF) which is an industry standard format on top of Syslog messages . By connecting Citrix WAF CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall/Data%20Connectors/Citrix_WAF.json","true" -"CommonSecurityLog","Citrix Web App Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall","citrix","citrix_waf_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","CitrixWAFAma","Citrix Systems Inc.","[Deprecated] Citrix WAF (Web App Firewall) via AMA"," Citrix WAF (Web App Firewall) is an industry leading enterprise-grade WAF solution. Citrix WAF mitigates threats against your public-facing assets, including websites, apps, and APIs. From layer 3 to layer 7, Citrix WAF includes protections such as IP reputation, bot mitigation, defense against the OWASP Top 10 application threats, built-in signatures to protect against application stack vulnerabilities, and more.

Citrix WAF supports Common Event Format (CEF) which is an industry standard format on top of Syslog messages . By connecting Citrix WAF CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall/Data%20Connectors/template_Citrix_WAFAMA.json","true" -"CommonSecurityLog","Claroty","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty","azuresentinel","azure-sentinel-solution-claroty","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Claroty","Claroty","[Deprecated] Claroty via Legacy Agent","The [Claroty](https://claroty.com/) data connector provides the capability to ingest [Continuous Threat Detection](https://claroty.com/resources/datasheets/continuous-threat-detection) and [Secure Remote Access](https://claroty.com/industrial-cybersecurity/sra) events into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty/Data%20Connectors/Connector_Claroty_CEF.json","true" -"CommonSecurityLog","Claroty","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty","azuresentinel","azure-sentinel-solution-claroty","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ClarotyAma","Claroty","[Deprecated] Claroty via AMA","The [Claroty](https://claroty.com/) data connector provides the capability to ingest [Continuous Threat Detection](https://claroty.com/resources/datasheets/continuous-threat-detection) and [Secure Remote Access](https://claroty.com/industrial-cybersecurity/sra) events into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty/Data%20Connectors/template_ClarotyAMA.json","true" -"CommonSecurityLog","Claroty 
xDome","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty%20xDome","claroty1709722359369","microsoft-sentinel-solution-xdome","2024-02-01","","","xDome Customer Support","Partner","https://claroty.com/support-policy","","domains,verticals","ClarotyxDome","Claroty","Claroty xDome","[Claroty](https://claroty.com/) xDome delivers comprehensive security and alert management capabilities for healthcare and industrial network environments. It is designed to map multiple source types, identify the collected data, and integrate it into Microsoft Sentinel data models. This results in the ability to monitor all potential threats in your healthcare and industrial environments in one location, leading to more effective security monitoring and a stronger security posture.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty%20xDome/Data%20Connectors/Claroty_xDome.json","true" -"","Cloud Identity Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloud%20Identity%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-cloudthreatdetection","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"","Cloud Service Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloud%20Service%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-cloudservicedetection","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"Cloudflare_CL","Cloudflare","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare","cloudflare","cloudflare_sentinel","2021-10-20","","","Cloudflare","Partner","https://support.cloudflare.com","","domains","CloudflareDataConnector","Cloudflare","[DEPRECATED] Cloudflare","The Cloudflare data connector provides the capability to ingest [Cloudflare 
logs](https://developers.cloudflare.com/logs/) into Microsoft Sentinel using the Cloudflare Logpush and Azure Blob Storage. Refer to [Cloudflare documentation](https://developers.cloudflare.com/logs/logpush) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare/Data%20Connectors/Cloudflare_API_FunctionApp.json","true" -"CloudflareV2_CL","Cloudflare","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare","cloudflare","cloudflare_sentinel","2021-10-20","","","Cloudflare","Partner","https://support.cloudflare.com","","domains","CloudflareDefinition","Microsoft","Cloudflare (Using Blob Container) (via Codeless Connector Framework)"," The Cloudflare data connector provides the capability to ingest Cloudflare logs into Microsoft Sentinel using the Cloudflare Logpush and Azure Blob Storage. Refer to [Cloudflare documentation](https://developers.cloudflare.com/logs/about/)for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare/Data%20Connectors/CloudflareLog_CCF/CloudflareLog_ConnectorDefinition.json","true" -"CloudflareV2_CL","Cloudflare CCF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare%20CCF","cloudflare","azure-sentinel-solution-cloudflare-ccf","2025-09-30","","","Cloudflare","Partner","https://support.cloudflare.com","","domains","CloudflareDefinition","Microsoft","Cloudflare (Using Blob Container) (via Codeless Connector Framework)"," The Cloudflare data connector provides the capability to ingest Cloudflare logs into Microsoft Sentinel using the Cloudflare Logpush and Azure Blob Storage. 
Refer to [Cloudflare documentation](https://developers.cloudflare.com/logs/about/)for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare%20CCF/Data%20Connectors/CloudflareLog_CCF/CloudflareLog_ConnectorDefinition.json","true" -"Malware_Data_CL","CofenseIntelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence","cofense","cofense-intelligence-sentinel","2023-05-26","2024-05-26","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseIntelligence","Cofense","Cofense Intelligence Threat Indicators Ingestion","The [Cofense-Intelligence](https://cofense.com/product-services/phishing-intelligence/) data connector provides the following capabilities:
1. CofenseToSentinel :
>* Get Threat Indicators from the Cofense Intelligence platform and create Threat Intelligence Indicators in Microsoft Sentinel.
2. SentinelToDefender :
>* Get Malware from Cofense Intelligence and post to custom logs table.
3. CofenseIntelligenceMalware :
>* Get Cofense Intelligence Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
4. DownloadThreatReports :
>* This data connector will fetch the malware data and create the Link from which we can download Threat Reports.
5. RetryFailedIndicators :
>* This data connector will fetch failed indicators from failed indicators file and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of REST APIs refer to the below documentations:
1. Cofense Intelligence API documentation:
> https://www.threathq.com/docs/rest_api_reference.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence/Data%20Connectors/CofenseIntelligenceDataConnector/CofenseIntelligence_API_FunctionApp.json","true" -"ThreatIntelligenceIndicator","CofenseIntelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence","cofense","cofense-intelligence-sentinel","2023-05-26","2024-05-26","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseIntelligence","Cofense","Cofense Intelligence Threat Indicators Ingestion","The [Cofense-Intelligence](https://cofense.com/product-services/phishing-intelligence/) data connector provides the following capabilities:
1. CofenseToSentinel :
>* Get Threat Indicators from the Cofense Intelligence platform and create Threat Intelligence Indicators in Microsoft Sentinel.
2. SentinelToDefender :
>* Get Malware from Cofense Intelligence and post to custom logs table.
3. CofenseIntelligenceMalware :
>* Get Cofense Intelligence Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
4. DownloadThreatReports :
>* This data connector will fetch the malware data and create the Link from which we can download Threat Reports.
5. RetryFailedIndicators :
>* This data connector will fetch failed indicators from failed indicators file and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of REST APIs refer to the below documentations:
1. Cofense Intelligence API documentation:
> https://www.threathq.com/docs/rest_api_reference.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence/Data%20Connectors/CofenseIntelligenceDataConnector/CofenseIntelligence_API_FunctionApp.json","true" -"Cofense_Triage_failed_indicators_CL","CofenseTriage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage","cofense","cofense-triage-sentinel","2023-03-24","2023-03-24","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseTriage","Cofense","Cofense Triage Threat Indicators Ingestion","The [Cofense-Triage](https://cofense.com/product-services/cofense-triage/) data connector provides the following capabilities:
1. CofenseBasedIndicatorCreator :
>* Get Threat Indicators from the Cofense Triage platform and create Threat Intelligence Indicators in Microsoft Sentinel.
> * Ingest Cofense Indicator ID and report links into custom logs table.
2. NonCofenseBasedIndicatorCreatorToCofense :
>* Get Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Cofense Triage platform.
3. IndicatorCreatorToDefender :
>* Get Cofense Triage Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
4. RetryFailedIndicators :
>* Get failed indicators from failed indicators files and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of REST APIs refer to the below two documentations:
1. Cofense API documentation:
> https://``/docs/api/v2/index.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage/Data%20Connectors/CofenseTriageDataConnector/CofenseTriage_API_FunctionApp.json","true" -"Report_links_data_CL","CofenseTriage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage","cofense","cofense-triage-sentinel","2023-03-24","2023-03-24","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseTriage","Cofense","Cofense Triage Threat Indicators Ingestion","The [Cofense-Triage](https://cofense.com/product-services/cofense-triage/) data connector provides the following capabilities:
1. CofenseBasedIndicatorCreator :
>* Get Threat Indicators from the Cofense Triage platform and create Threat Intelligence Indicators in Microsoft Sentinel.
> * Ingest Cofense Indicator ID and report links into custom logs table.
2. NonCofenseBasedIndicatorCreatorToCofense :
>* Get Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Cofense Triage platform.
3. IndicatorCreatorToDefender :
>* Get Cofense Triage Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
4. RetryFailedIndicators :
>* Get failed indicators from failed indicators files and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of REST APIs refer to the below two documentations:
1. Cofense API documentation:
> https://``/docs/api/v2/index.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage/Data%20Connectors/CofenseTriageDataConnector/CofenseTriage_API_FunctionApp.json","true" -"ThreatIntelligenceIndicator","CofenseTriage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage","cofense","cofense-triage-sentinel","2023-03-24","2023-03-24","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseTriage","Cofense","Cofense Triage Threat Indicators Ingestion","The [Cofense-Triage](https://cofense.com/product-services/cofense-triage/) data connector provides the following capabilities:
1. CofenseBasedIndicatorCreator :
>* Get Threat Indicators from the Cofense Triage platform and create Threat Intelligence Indicators in Microsoft Sentinel.
> * Ingest Cofense Indicator ID and report links into custom logs table.
2. NonCofenseBasedIndicatorCreatorToCofense :
>* Get Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Cofense Triage platform.
3. IndicatorCreatorToDefender :
>* Get Cofense Triage Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
4. RetryFailedIndicators :
>* Get failed indicators from failed indicators files and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of REST APIs refer to the below two documentations:
1. Cofense API documentation:
> https://``/docs/api/v2/index.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage/Data%20Connectors/CofenseTriageDataConnector/CofenseTriage_API_FunctionApp.json","true" -"CognniIncidents_CL","Cognni","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cognni","shieldox","cognni_for_microsoft_sentinel","2022-05-06","","","Cognni","Partner","https://cognni.ai/contact-support/","","domains","CognniSentinelDataConnector","Cognni","Cognni","The Cognni connector offers a quick and simple integration with Microsoft Sentinel. You can use Cognni to autonomously map your previously unclassified important information and detect related incidents. This allows you to recognize risks to your important information, understand the severity of the incidents, and investigate the details you need to remediate, fast enough to make a difference.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cognni/Data%20Connectors/CognniSentinelConnector.json","true" -"ThreatIntelligenceIndicator","CognyteLuminar","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CognyteLuminar","cognytetechnologiesisraelltd","microsoft-sentinel-solution-cognyte-luminar","2023-09-15","","","Cognyte Luminar","Partner","https://www.cognyte.com/contact/","","domains","CognyteLuminar","Cognyte Technologies Israel Ltd","Luminar IOCs and Leaked Credentials","Luminar IOCs and Leaked Credentials connector allows integration of intelligence-based IOC data and customer-related leaked records identified by Luminar.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CognyteLuminar/Data%20Connectors/CognyteLuminar_FunctionApp.json","true" 
-"Cohesity_CL","CohesitySecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CohesitySecurity","cohesitydev1592001764720","cohesity_sentinel_data_connector","2022-10-10","","","Cohesity","Partner","https://support.cohesity.com/","","domains","CohesityDataConnector","Cohesity","Cohesity","The Cohesity function apps provide the ability to ingest Cohesity Datahawk ransomware alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CohesitySecurity/Data%20Connectors/Helios2Sentinel/Cohesity_API_FunctionApp.json","true" -"CommonSecurityLog","Common Event Format","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Common%20Event%20Format","azuresentinel","azure-sentinel-solution-commoneventformat","2022-05-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CEF","Any","Common Event Format (CEF)","Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by many security vendors to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223902&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Common%20Event%20Format/Data%20Connectors/CEF.JSON","true" -"CommonSecurityLog","Common Event Format","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Common%20Event%20Format","azuresentinel","azure-sentinel-solution-commoneventformat","2022-05-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CefAma","Microsoft","Common Event Format (CEF) via AMA","Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by many security vendors to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223547&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Common%20Event%20Format/Data%20Connectors/CEF%20AMA.JSON","true" -"CommvaultSecurityIQ_CL","Commvault Security IQ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Commvault%20Security%20IQ","commvault","microsoft-sentinel-solution-commvaultsecurityiq","2023-08-17","","","Commvault","Partner","https://www.commvault.com/support","","domains","CommvaultSecurityIQ_CL","Commvault","CommvaultSecurityIQ","This Azure Function enables Commvault users to ingest alerts/events into their Microsoft Sentinel instance. 
With Analytic Rules,Microsoft Sentinel can automatically create Microsoft Sentinel incidents from incoming events and logs.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Commvault%20Security%20IQ/Data%20Connectors/CommvaultSecurityIQ_API_AzureFunctionApp.json","true" -"","ContinuousDiagnostics&Mitigation","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContinuousDiagnostics%26Mitigation","azuresentinel","azure-sentinel-solution-continuousdiagnostics","2022-08-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"CommonSecurityLog","Contrast Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect","contrast_security","contrast_protect_azure_sentinel_solution","2021-10-20","","","Contrast Protect","Partner","https://docs.contrastsecurity.com/","","domains","ContrastProtect","Contrast Security","[Deprecated] Contrast Protect via Legacy Agent","Contrast Protect mitigates security threats in production applications with runtime protection and observability. Attack event results (blocked, probed, suspicious...) and other information can be sent to Microsoft Microsoft Sentinel to blend with security information from other systems.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect/Data%20Connectors/ContrastProtect.json","true" -"CommonSecurityLog","Contrast Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect","contrast_security","contrast_protect_azure_sentinel_solution","2021-10-20","","","Contrast Protect","Partner","https://docs.contrastsecurity.com/","","domains","ContrastProtectAma","Contrast Security","[Deprecated] Contrast Protect via AMA","Contrast Protect mitigates security threats in production applications with runtime protection and observability. Attack event results (blocked, probed, suspicious...) 
and other information can be sent to Microsoft Microsoft Sentinel to blend with security information from other systems.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect/Data%20Connectors/template_ContrastProtectAMA.json","true" -"ContrastADRIncident_CL","ContrastADR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR","contrast_security","contrast_adr_azure_sentinel_solution","2025-01-18","2025-01-18","","Contrast Security","Partner","https://support.contrastsecurity.com/hc/en-us","","domains","ContrastADR","Contrast Security","ContrastADR","The ContrastADR data connector provides the capability to ingest Contrast ADR attack events into Microsoft Sentinel using the ContrastADR Webhook. ContrastADR data connector can enrich the incoming webhook data with ContrastADR API enrichment calls.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR/Data%20Connectors/ContrastADR_API_FunctionApp.json","true" -"ContrastADR_CL","ContrastADR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR","contrast_security","contrast_adr_azure_sentinel_solution","2025-01-18","2025-01-18","","Contrast Security","Partner","https://support.contrastsecurity.com/hc/en-us","","domains","ContrastADR","Contrast Security","ContrastADR","The ContrastADR data connector provides the capability to ingest Contrast ADR attack events into Microsoft Sentinel using the ContrastADR Webhook. 
ContrastADR data connector can enrich the incoming webhook data with ContrastADR API enrichment calls.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR/Data%20Connectors/ContrastADR_API_FunctionApp.json","true" -"Corelight_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_bacnet_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_capture_loss_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_cip_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_conn_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_conn_long_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_conn_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_corelight_burst_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_corelight_overall_capture_loss_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_corelight_profiling_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_datared_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_dce_rpc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_dga_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_dhcp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_dnp3_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_dns_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_dns_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_dpd_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_encrypted_dns_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_enip_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_enip_debug_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_enip_list_identity_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_etc_viz_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_files_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_files_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ftp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_generic_dns_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_generic_icmp_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_http2_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_http_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_http_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_icmp_specific_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_intel_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ipsec_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_irc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_iso_cotp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_kerberos_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_certs_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_devices_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_domains_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_hosts_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_names_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_remotes_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_services_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_users_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_local_subnets_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_local_subnets_dj_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_local_subnets_graphs_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_log4shell_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_modbus_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_mqtt_connect_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_mqtt_publish_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_mqtt_subscribe_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_mysql_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_notice_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ntlm_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ntp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ocsp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_openflow_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_packet_filter_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_pe_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_profinet_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_profinet_dce_rpc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_profinet_debug_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_radius_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_rdp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_reporter_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_rfb_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_s7comm_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_signatures_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_sip_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_smartpcap_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_smartpcap_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_smb_files_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_smb_mapping_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_smtp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_smtp_links_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_snmp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_socks_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_software_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_specific_dns_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ssh_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ssl_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ssl_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_stepping_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_stun_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_stun_nat_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_suricata_corelight_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_suricata_eve_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_suricata_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_suricata_zeek_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_syslog_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_tds_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_tds_rpc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_tds_sql_batch_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_traceroute_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_tunnel_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_unknown_smartpcap_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_util_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_vpn_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_weird_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_weird_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_weird_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_wireguard_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_x509_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_x509_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_zeek_doctor_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"PaloAltoCortexXDR_Alerts_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" -"PaloAltoCortexXDR_Audit_Agent_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" -"PaloAltoCortexXDR_Audit_Management_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" -"PaloAltoCortexXDR_Endpoints_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" -"PaloAltoCortexXDR_Incidents_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" -"CriblAccess_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. This gives you more security insight into your organization's data pipelines.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" -"CriblAudit_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. 
This gives you more security insight into your organization's data pipelines.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" -"CriblInternal_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. This gives you more security insight into your organization's data pipelines.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" -"CriblUIAccess_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. 
This gives you more security insight into your organization's data pipelines.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" -"CrowdStrikeAlerts","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" -"CrowdStrikeDetections","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. 
This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" -"CrowdStrikeHosts","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. 
It supports DCR-based ingestion time transformations so that queries can run more efficiently.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" -"CrowdStrikeIncidents","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" -"CrowdStrikeVulnerabilities","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. 
This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" -"ThreatIntelligenceIndicator","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconAdversaryIntelligence","CrowdStrike","CrowdStrike Falcon Adversary Intelligence ","The [CrowdStrike](https://www.crowdstrike.com/) Falcon Indicators of Compromise connector retrieves the Indicators of Compromise from the Falcon Intel API and uploads them [Microsoft Sentinel Threat Intel](https://learn.microsoft.com/en-us/azure/sentinel/understand-threat-intelligence).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeFalconAdversaryIntelligence/CrowdStrikeFalconAdversaryIntelligence_FunctionApp.json","true" -"CommonSecurityLog","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconEndpointProtection","CrowdStrike","[Deprecated] CrowdStrike Falcon Endpoint Protection via Legacy Agent","The [CrowdStrike Falcon Endpoint Protection](https://www.crowdstrike.com/endpoint-security-products/) connector 
allows you to easily connect your CrowdStrike Falcon Event Stream with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization's endpoints and improves your security operation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/Connector_Syslog_CrowdStrikeFalconEndpointProtection.json","true" -"CommonSecurityLog","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconEndpointProtectionAma","CrowdStrike","[Deprecated] CrowdStrike Falcon Endpoint Protection via AMA","The [CrowdStrike Falcon Endpoint Protection](https://www.crowdstrike.com/endpoint-security-products/) connector allows you to easily connect your CrowdStrike Falcon Event Stream with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization's endpoints and improves your security operation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/template_CrowdStrikeFalconEndpointProtectionAma.json","true" -"CrowdStrike_Additional_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" -"CrowdStrike_Audit_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" -"CrowdStrike_Auth_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" -"CrowdStrike_DNS_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" -"CrowdStrike_File_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" -"CrowdStrike_Network_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" -"CrowdStrike_Process_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" -"CrowdStrike_Registry_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" -"CrowdStrike_Secondary_Data_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" -"CrowdStrike_User_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" -"ASimAuditEventLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimAuthenticationEventLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimAuthenticationEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimDnsActivityLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimFileEventLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimFileEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimNetworkSessionLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimProcessEventLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimProcessEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimRegistryEventLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimRegistryEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimUserManagementActivityLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimUserManagementLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"CrowdStrike_Additional_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"CrowdStrike_Secondary_Data_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"CommonSecurityLog","CyberArk Enterprise Password Vault (EPV) Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events","cyberark","cyberark_epv_events_mss","2022-05-02","","","Cyberark","Partner","https://www.cyberark.com/services-support/technical-support/","","domains","CyberArk","Cyber-Ark","[Deprecated] CyberArk Enterprise Password Vault (EPV) Events via Legacy Agent","CyberArk Enterprise Password Vault generates an xml Syslog message for every action taken against the Vault. The EPV will send the xml messages through the Microsoft Sentinel.xsl translator to be converted into CEF standard format and sent to a syslog staging server of your choice (syslog-ng, rsyslog). The Log Analytics agent installed on your syslog staging server will import the messages into Microsoft Log Analytics. 
Refer to the [CyberArk documentation](https://docs.cyberark.com/Product-Doc/OnlineHelp/PAS/Latest/en/Content/PASIMP/DV-Integrating-with-SIEM-Applications.htm) for more guidance on SIEM integrations.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events/Data%20Connectors/CyberArk%20Data%20Connector.json","true" -"CommonSecurityLog","CyberArk Enterprise Password Vault (EPV) Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events","cyberark","cyberark_epv_events_mss","2022-05-02","","","Cyberark","Partner","https://www.cyberark.com/services-support/technical-support/","","domains","CyberArkAma","Cyber-Ark","[Deprecated] CyberArk Privilege Access Manager (PAM) Events via AMA","CyberArk Privilege Access Manager generates an xml Syslog message for every action taken against the Vault. The PAM will send the xml messages through the Microsoft Sentinel.xsl translator to be converted into CEF standard format and sent to a syslog staging server of your choice (syslog-ng, rsyslog). The Log Analytics agent installed on your syslog staging server will import the messages into Microsoft Log Analytics. 
Refer to the [CyberArk documentation](https://docs.cyberark.com/privilege-cloud-standard/Latest/en/Content/Privilege%20Cloud/privCloud-connect-siem.htm) for more guidance on SIEM integrations.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events/Data%20Connectors/template_CyberArkAMA.json","true" -"CyberArkAudit","CyberArkAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit","cyberark","cyberark_audit_sentinel","2024-03-01","","","CyberArk Support","Partner","https://www.cyberark.com/services-support/technical-support-contact/","","domains","CyberArkAudit","CyberArk","CyberArkAudit","The [CyberArk Audit](https://docs.cyberark.com/Audit/Latest/en/Content/Resources/_TopNav/cc_Home.htm) data connector provides the capability to retrieve security event logs of the CyberArk Audit service and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit/Data%20Connectors/CyberArkAudit_API_FunctionApp.json","true" -"CyberArk_AuditEvents_CL","CyberArkAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit","cyberark","cyberark_audit_sentinel","2024-03-01","","","CyberArk Support","Partner","https://www.cyberark.com/services-support/technical-support-contact/","","domains","CyberArkAudit","CyberArk","CyberArkAudit","The [CyberArk Audit](https://docs.cyberark.com/Audit/Latest/en/Content/Resources/_TopNav/cc_Home.htm) data connector provides the capability to retrieve security event logs of the CyberArk Audit service and more events into Microsoft Sentinel through the REST API. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit/Data%20Connectors/CyberArkAudit_API_FunctionApp.json","true" -"","CyberArkEPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkEPM","cyberark","cybr_epm_sentinel","2022-04-10","","","CyberArk Support","Partner","https://www.cyberark.com/services-support/technical-support-contact/","","domains","","","","","","false" -"","CybersecurityMaturityModelCertification(CMMC)2.0","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CybersecurityMaturityModelCertification%28CMMC%292.0","azuresentinel","azure-sentinel-solution-cmmcv2","2022-01-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"CyberSixgill_Alerts_CL","Cybersixgill-Actionable-Alerts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cybersixgill-Actionable-Alerts","cybersixgill1657701397011","azure-sentinel-cybersixgill-actionable-alerts","2023-02-27","2024-09-24","","Cybersixgill","Partner","https://www.cybersixgill.com/","","domains","CybersixgillActionableAlerts","Cybersixgill","Cybersixgill Actionable Alerts","Actionable alerts provide customized alerts based on configured assets","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cybersixgill-Actionable-Alerts/Data%20Connectors/Cybersixgill_FunctionApp.json","true" -"","Cyble Vision","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyble%20Vision","cybleinc1737472004964","cybleinc1737472004964-azure-sentinel-offerid","2025-05-05","","","Cyble Support","Partner","https://cyble.com/talk-to-sales/","","domains","","","","","","false" -"SecurityEvent","Cyborg Security 
HUNTER","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyborg%20Security%20HUNTER","cyborgsecurityinc1689265652101","azure-sentinel-solution-cyborgsecurity-hunter","2023-07-03","2023-09-22","","Cyborg Security","Partner","https://hunter.cyborgsecurity.io/customer-support","","domains","CyborgSecurity_HUNTER","Cyborg Security","Cyborg Security HUNTER Hunt Packages","Cyborg Security is a leading provider of advanced threat hunting solutions, with a mission to empower organizations with cutting-edge technology and collaborative tools to proactively detect and respond to cyber threats. Cyborg Security's flagship offering, the HUNTER Platform, combines powerful analytics, curated threat hunting content, and comprehensive hunt management capabilities to create a dynamic ecosystem for effective threat hunting operations.

Follow the steps to gain access to Cyborg Security's Community and setup the 'Open in Tool' capabilities in the HUNTER Platform.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyborg%20Security%20HUNTER/Data%20Connectors/CyborgSecurity_HUNTER.json","true" -"CyeraAssets_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyeradspm","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Azure Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once recieced can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" -"CyeraAssets_MS_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyeradspm","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Azure Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once recieced can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" -"CyeraClassifications_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyeradspm","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Azure Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once recieced can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" -"CyeraIdentities_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyeradspm","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Azure Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once recieced can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" -"CyeraIssues_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyeradspm","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Azure Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once recieced can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" -"CyeraAssets_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyeradspm","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for 
Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" -"CyeraAssets_MS_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyeradspm","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" -"CyeraClassifications_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyeradspm","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera 
Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" -"CyeraIdentities_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyeradspm","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft 
Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" -"CyeraIssues_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyeradspm","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose 
|\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" -"CyfirmaASCertificatesAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" -"CyfirmaASCloudWeaknessAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" -"CyfirmaASConfigurationAlerts_CL","Cyfirma Attack 
Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" -"CyfirmaASDomainIPReputationAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" -"CyfirmaASDomainIPVulnerabilityAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" -"CyfirmaASOpenPortsAlerts_CL","Cyfirma Attack 
Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" -"CyfirmaBIDomainITAssetAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" -"CyfirmaBIExecutivePeopleAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" -"CyfirmaBIMaliciousMobileAppsAlerts_CL","Cyfirma Brand 
Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" -"CyfirmaBIProductSolutionAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" -"CyfirmaBISocialHandlersAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" -"CyfirmaCompromisedAccounts_CL","Cyfirma Compromised 
Accounts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Compromised%20Accounts","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirmacompromisedaccounts","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCompromisedAccountsDataConnector","Microsoft","CYFIRMA Compromised Accounts","The CYFIRMA Compromised Accounts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR/DeTCT API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Compromised%20Accounts/Data%20Connectors/CyfirmaCompromisedAccounts_ccp/CyfirmaCompAcc_DataConnectorDefinition.json","true" -"CyfirmaCampaigns_CL","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" -"CyfirmaIndicators_CL","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" -"CyfirmaMalware_CL","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. 
Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" -"CyfirmaThreatActors_CL","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" -"CyfirmaDBWMDarkWebAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" -"CyfirmaDBWMPhishingAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. 
Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" -"CyfirmaDBWMRansomwareAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" -"CyfirmaSPEConfidentialFilesAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" -"CyfirmaSPEPIIAndCIIAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. 
Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" -"CyfirmaSPESocialThreatAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" -"CyfirmaSPESourceCodeAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" -"CyfirmaVulnerabilities_CL","Cyfirma Vulnerabilities Intel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Vulnerabilities%20Intel","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-vulnerabilities","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaVulnerabilitiesIntelDC","Microsoft","CYFIRMA Vulnerabilities Intelligence","The CYFIRMA Vulnerabilities Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. 
Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the CYFIRMA API's to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Vulnerabilities%20Intel/Data%20Connectors/CyfirmaVulnerabilitiesIntel_ccp/CyfirmaVulnerabilities_DataConnectorDefinition.json","true" -"CynerioEvent_CL","Cynerio","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cynerio","cynerio1681887657820","cynerio-medical-device-security-sentinel-connector","2023-03-29","2023-03-29","","Cynerio","Partner","https://cynerio.com","","domains","CynerioSecurityEvents","Cynerio","Cynerio Security Events","The [Cynerio](https://www.cynerio.com/) connector allows you to easily connect your Cynerio Security Events with Microsoft Sentinel, to view IDS Events. This gives you more insight into your organization network security posture and improves your security operation capabilities. 
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cynerio/Data%20Connectors/Cynerio_Connector.json","true" -"","Cyware","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyware","cywarelabsinc1709256751930","microsoft-sentinel-solution-cyware","2024-03-18","2024-03-18","","Cyware","Partner","","","domains","","","","","","false" -"","DEV-0537DetectionandHunting","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DEV-0537DetectionandHunting","azuresentinel","azure-sentinel-solution-DEV-0537DetectionandHunting","2022-04-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"","DNS Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DNS%20Essentials","azuresentinel","azure-sentinel-solution-dns-domain","2023-01-14","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"darktrace_model_alerts_CL","Darktrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Darktrace","darktrace1655286944672","darktrace_for_sentinel","2022-05-02","","","Darktrace","Partner","https://www.darktrace.com/en/contact/","","domains","DarktraceRESTConnector","Darktrace","Darktrace Connector for Microsoft Sentinel REST API","The Darktrace REST API connector pushes real-time events from Darktrace to Microsoft Sentinel and is designed to be used with the Darktrace Solution for Sentinel. The connector writes logs to a custom log table titled ""darktrace_model_alerts_CL""; Model Breaches, AI Analyst Incidents, System Alerts and Email Alerts can be ingested - additional filters can be set up on the Darktrace System Configuration page. 
Data is pushed to Sentinel from Darktrace masters.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Darktrace/Data%20Connectors/DarktraceConnectorRESTAPI.json","true" -"ThreatIntelligenceIndicator","Datalake2Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Datalake2Sentinel","cert_orange_cyberdefense","microsoft-sentinel-solution-datalake2sentinel","2024-01-15","2024-01-15","","Orange Cyberdefense","Partner","https://www.orangecyberdefense.com/global/contact","","domains,verticals","Datalake2SentinelConnector","Orange Cyberdefense","Datalake2Sentinel","This solution installs the Datalake2Sentinel connector which is built using the Codeless Connector Platform and allows you to automatically ingest threat intelligence indicators from **Datalake Orange Cyberdefense's CTI platform** into Microsoft Sentinel via the Upload Indicators REST API. After installing the solution, configure and enable this data connector by following guidance in Manage solution view.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Datalake2Sentinel/Data%20Connectors/Datalake2SentinelConnector.json","true" -"DataminrPulse_Alerts_CL","Dataminr Pulse","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dataminr%20Pulse","dataminrinc1648845584891","dataminr_sentinel","2023-04-12","2023-04-12","","Dataminr Support","Partner","https://www.dataminr.com/dataminr-support#support","","domains","DataminrPulseAlerts","Dataminr","Dataminr Pulse Alerts Data Connector","Dataminr Pulse Alerts Data Connector brings our AI-powered real-time intelligence into Microsoft Sentinel for faster threat detection and response.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dataminr%20Pulse/Data%20Connectors/DataminrPulseAlerts/DataminrPulseAlerts_FunctionApp.json","true" -"CommonSecurityLog","Delinea Secret 
Server","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server","delineainc1653506022260","delinea_secret_server_mss","2022-05-06","","","Delinea","Partner","https://delinea.com/support/","","domains","DelineaSecretServerAma","Delinea, Inc","[Deprecated] Delinea Secret Server via AMA","Common Event Format (CEF) from Delinea Secret Server ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server/Data%20Connectors/template_DelineaSecretServerAMA.json","true" -"CommonSecurityLog","Delinea Secret Server","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server","delineainc1653506022260","delinea_secret_server_mss","2022-05-06","","","Delinea","Partner","https://delinea.com/support/","","domains","DelineaSecretServer_CEF","Delinea, Inc","[Deprecated] Delinea Secret Server via Legacy Agent","Common Event Format (CEF) from Delinea Secret Server ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server/Data%20Connectors/DelineaSecretServer_CEF.json","true" -"","Dev 0270 Detection and Hunting","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dev%200270%20Detection%20and%20Hunting","azuresentinel","azure-sentinel-solution-dev0270detectionandhunting","2022-11-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"Syslog","Digital Guardian Data Loss Prevention","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Guardian%20Data%20Loss%20Prevention","azuresentinel","azure-sentinel-solution-digitalguardiandlp","2021-07-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","DigitalGuardianDLP","Digital Guardian","[Deprecated] Digital Guardian Data Loss Prevention","[Digital Guardian Data Loss Prevention (DLP)](https://digitalguardian.com/platform-overview) data connector provides the capability to ingest Digital 
Guardian DLP logs into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Guardian%20Data%20Loss%20Prevention/Data%20Connectors/Connector_DigitalGuardian_Syslog.json","true" -"DigitalShadows_CL","Digital Shadows","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Shadows","digitalshadows1662022995707","digitalshadows_searchlight_for_sentinel","","","","Digital Shadows","Partner","https://www.digitalshadows.com/","","domains","DigitalShadowsSearchlightAzureFunctions","Digital Shadows","Digital Shadows Searchlight","The Digital Shadows data connector provides ingestion of the incidents and alerts from Digital Shadows Searchlight into the Microsoft Sentinel using the REST API. The connector will provide the incidents and alerts information such that it helps to examine, diagnose and analyse the potential security risks and threats.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Shadows/Data%20Connectors/Digital%20Shadows/DigitalShadowsSearchlight_API_functionApp.json","true" -"","DomainTools","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DomainTools","domaintoolsllc1647901527537","domaintools-iris-investigate","2022-10-20","","","DomainTools","Partner","https://www.domaintools.com/support/","","domains","","","","","","false" -"DoppelTable_CL","Doppel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Doppel","doppel","azure-sentinel-solution-doppel","2024-11-20","","","Doppel","Partner","https://www.doppel.com/request-a-demo","","domains","Doppel_DataConnector","Doppel","Doppel Data Connector","The data connector is built on Microsoft Sentinel for Doppel events and alerts and supports DCR-based [ingestion time transformations](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/ingestion-time-transformations) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in 
better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Doppel/Data%20Connectors/Template_Doppel.json","true" -"DragosAlerts_CL","Dragos","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dragos","dragosinc1734451815609","microsoft-sentinel-solution-dragos","2025-01-23","2025-01-23","","Dragos Inc","Partner","https://www.dragos.com","","domains","DragosSitestoreCCP","Dragos"," Dragos Notifications via Cloud Sitestore","The [Dragos Platform](https://www.dragos.com/) is the leading Industrial Cyber Security platform it offers a comprehensive Operational Technology (OT) cyber threat detection built by unrivaled industrial cybersecurity expertise. This solution enables Dragos Platform notification data to be viewed in Microsoft Sentinel so that security analysts are able to triage potential cyber security events occurring in their industrial environments.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dragos/Data%20Connectors/DragosSiteStore_CCP/dragosSitestoreDataConnectorDefinition.json","true" -"DruvaInsyncEvents_CL","DruvaDataSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud","druva-azuresentinel-solution","azure-sentinel-solution-druva","2024-12-24","","","Druva Inc","Partner","https://support.druva.com/","","domains","DruvaEventCCPDefinition","Microsoft","Druva Events Connector","Provides capability to ingest the Druva events from Druva APIs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud/Data%20Connectors/Druva_ccp/Druva_DataConnectorDefinition.json","true" -"DruvaPlatformEvents_CL","DruvaDataSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud","druva-azuresentinel-solution","azure-sentinel-solution-druva","2024-12-24","","","Druva Inc","Partner","https://support.druva.com/","","domains","DruvaEventCCPDefinition","Microsoft","Druva Events Connector","Provides 
capability to ingest the Druva events from Druva APIs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud/Data%20Connectors/Druva_ccp/Druva_DataConnectorDefinition.json","true" -"DruvaSecurityEvents_CL","DruvaDataSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud","druva-azuresentinel-solution","azure-sentinel-solution-druva","2024-12-24","","","Druva Inc","Partner","https://support.druva.com/","","domains","DruvaEventCCPDefinition","Microsoft","Druva Events Connector","Provides capability to ingest the Druva events from Druva APIs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud/Data%20Connectors/Druva_ccp/Druva_DataConnectorDefinition.json","true" -"Dynamics365Activity","Dynamics 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynamics%20365","sentinel4dynamics365","dynamics365connector","2023-01-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Dynamics365","Microsoft","Dynamics 365","The Dynamics 365 Common Data Service (CDS) activities connector provides insight into admin, user, and support activities, as well as Microsoft Social Engagement logging events. By connecting Dynamics 365 CRM logs into Microsoft Sentinel, you can view this data in workbooks, use it to create custom alerts, and improve your investigation process. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com//fwlink/p/?linkid=2226719&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynamics%20365/Data%20Connectors/template_Dynamics365.json","true" -"DynatraceAttacks_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceAttacks","Dynatrace","Dynatrace Attacks","This connector uses the Dynatrace Attacks REST API to ingest detected attacks into Microsoft Sentinel Log Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_Attacks.json","true" -"DynatraceAuditLogs_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceAuditLogs","Dynatrace","Dynatrace Audit Logs","This connector uses the [Dynatrace Audit Logs REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/audit-logs) to ingest tenant audit logs into Microsoft Sentinel Log Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_AuditLogs.json","true" -"DynatraceProblems_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceProblems","Dynatrace","Dynatrace Problems","This connector uses the [Dynatrace Problem REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/problems-v2) to ingest problem events into Microsoft 
Sentinel Log Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_Problems.json","true" -"DynatraceSecurityProblems_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceRuntimeVulnerabilities","Dynatrace","Dynatrace Runtime Vulnerabilities","This connector uses the [Dynatrace Security Problem REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/application-security/vulnerabilities/get-vulnerabilities) to ingest detected runtime vulnerabilities into Microsoft Sentinel Log Analytics.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_RuntimeVulnerabilities.json","true" -"ESETInspect_CL","ESET Inspect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Inspect","esetresearch1579795941720","eset_enterprise_inspector_mss","2022-06-01","","","ESET Enterprise","Partner","https://www.eset.com/int/business/solutions/endpoint-detection-and-response/","","domains","ESETInspect","ESET Netherlands","ESET Inspect","This connector will ingest detections from [ESET Inspect](https://www.eset.com/int/business/solutions/xdr-extended-detection-and-response/) using the provided [REST API](https://help.eset.com/ei_navigate/latest/en-US/api.html). 
This API is present in ESET Inspect version 1.4 and later.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Inspect/Data%20Connectors/ESETInspect_API_FunctionApp.json","true" -"IntegrationTableIncidents_CL","ESET Protect Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform","eset","eset-protect-platform-solution","2024-10-29","2025-06-17","","ESET Enterprise Integrations","Partner","https://help.eset.com/eset_connect/en-US/integrations.html","","domains","ESETProtectPlatform","ESET","ESET Protect Platform","The ESET Protect Platform data connector enables users to inject detections data from [ESET Protect Platform](https://www.eset.com/int/business/protect-platform/) using the provided [Integration REST API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors). Integration REST API runs as scheduled Azure Function App.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors/ESETProtectPlatform_API_FunctionApp.json","true" -"IntegrationTable_CL","ESET Protect Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform","eset","eset-protect-platform-solution","2024-10-29","2025-06-17","","ESET Enterprise Integrations","Partner","https://help.eset.com/eset_connect/en-US/integrations.html","","domains","ESETProtectPlatform","ESET","ESET Protect Platform","The ESET Protect Platform data connector enables users to inject detections data from [ESET Protect Platform](https://www.eset.com/int/business/protect-platform/) using the provided [Integration REST API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors). 
Integration REST API runs as scheduled Azure Function App.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors/ESETProtectPlatform_API_FunctionApp.json","true" -"Syslog","ESETPROTECT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESETPROTECT","cyberdefensegroupbv1625581149103","eset_protect","2021-10-20","","","ESET Netherlands","Partner","https://techcenter.eset.nl/en/","","domains","ESETPROTECT","ESET","[Deprecated] ESET PROTECT","This connector gathers all events generated by ESET software through the central management solution ESET PROTECT (formerly ESET Security Management Center). This includes Anti-Virus detections, Firewall detections but also more advanced EDR detections. For a complete list of events please refer to [the documentation](https://help.eset.com/protect_admin/latest/en-US/events-exported-to-json-format.html).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESETPROTECT/Data%20Connectors/Connector_Syslog_ESETPROTECT.json","true" -"","EatonForeseer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/EatonForeseer","azuresentinel","azure-sentinel-solution-eatonforeseer","2022-06-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"","EclecticIQ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/EclecticIQ","azuresentinel","azure-sentinel-solution-eclecticiqtip","2022-09-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"EgressDefend_CL","Egress Defend","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Defend","egress1589289169584","azure-sentinel-solution-egress-defend","2023-07-27","","","egress1589289169584","Partner","https://support.egress.com/s/","","domains","EgressDefendPolling","Egress Software Technologies","Egress Defend","The Egress Defend audit connector provides the 
capability to ingest Egress Defend Data into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Defend/Data%20Connectors/DefendAPIConnector.json","true" -"DefendAuditData","Egress Iris","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Iris","egress1589289169584","egress-sentinel","2024-03-11","","","Egress Software Technologies Ltd","Partner","https://support.egress.com","","domains","EgressSiemPolling","Egress Software Technologies","Egress Iris Connector","The Egress Iris connector will allow you to ingest Egress data into Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Iris/Data%20Connectors/EgressDataConnector.json","true" -"EgressEvents_CL","Egress Iris","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Iris","egress1589289169584","egress-sentinel","2024-03-11","","","Egress Software Technologies Ltd","Partner","https://support.egress.com","","domains","EgressSiemPolling","Egress Software Technologies","Egress Iris Connector","The Egress Iris connector will allow you to ingest Egress data into Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Iris/Data%20Connectors/EgressDataConnector.json","true" -"","Elastic Search","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Elastic%20Search","azuresentinel","azure-sentinel-solution-elasticsearch","2022-09-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"ElasticAgentLogs_CL","ElasticAgent","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ElasticAgent","azuresentinel","azure-sentinel-solution-elasticagent","2021-11-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ElasticAgent","Elastic","Elastic Agent","The [Elastic Agent](https://www.elastic.co/security) data connector provides the capability to ingest Elastic Agent logs, metrics, 
and security data into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ElasticAgent/Data%20Connectors/Connector_ElasticAgent.json","true" -"","Endace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Endace","azuresentinel","azure-sentinel-solution-endace","2025-03-24","","","Endace","Partner","https://endace.com","","domains","","","","","","false" -"","Endpoint Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Endpoint%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-endpointthreat","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"","Entrust identity as Service","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Entrust%20identity%20as%20Service","azuresentinel","azure-sentinel-solution-entrustidentity","2023-05-22","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"ErmesBrowserSecurityEvents_CL","Ermes Browser Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ermes%20Browser%20Security","ermes","azure-sentinel-solution-ermes-browser-security","2023-09-29","","","Ermes Cyber Security S.p.A.","Partner","https://www.ermes.company","","domains","ErmesBrowserSecurityEvents","Ermes Cyber Security S.p.A.","Ermes Browser Security Events","Ermes Browser Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ermes%20Browser%20Security/Data%20Connectors/ErmesBrowserSecurityEvents_ccp/data_connector_definition.json","true" -"eset_CL","Eset Security Management Center","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Eset%20Security%20Management%20Center","esetresearch1579795941720","Eset_Security_Management_Center_MSS","2022-05-11","","","Eset","partner","https://support.eset.com/en","","domains","EsetSMC","Eset","Eset Security Management 
Center","Connector for [Eset SMC](https://help.eset.com/esmc_admin/72/en-US/) threat events, audit logs, firewall events and web sites filter.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Eset%20Security%20Management%20Center/Data%20Connectors/esetSmc.json","true" -"Syslog","Exabeam Advanced Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Exabeam%20Advanced%20Analytics","azuresentinel","azure-sentinel-solution-exabeamadvancedanalytics","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Exabeam","Exabeam","[Deprecated] Exabeam Advanced Analytics","The [Exabeam Advanced Analytics](https://www.exabeam.com/ueba/advanced-analytics-and-mitre-detect-and-stop-threats/) data connector provides the capability to ingest Exabeam Advanced Analytics events into Microsoft Sentinel. Refer to [Exabeam Advanced Analytics documentation](https://docs.exabeam.com/) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Exabeam%20Advanced%20Analytics/Data%20Connectors/Connector_Exabeam_Syslog.json","true" -"ExtraHop_Detections_CL","ExtraHop","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop","extrahop","extrahop-revealx-sentinel","2025-02-11","2025-06-04","","ExtraHop Support","Partner","https://www.extrahop.com/customer-support","","domains","ExtraHop","ExtraHop","ExtraHop Detections Data Connector","The [ExtraHop](https://extrahop.com/) Detections Data Connector enables you to import detection data from ExtraHop RevealX to Microsoft Sentinel through webhook payloads.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop/Data%20Connectors/ExtraHopDataConnector/ExtraHop_FunctionApp.json","true" -"CommonSecurityLog","ExtraHop 
Reveal(x)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29","extrahop","extrahop_revealx_mss","2022-05-19","","","ExtraHop","Partner","https://www.extrahop.com/support/","","domains","ExtraHopNetworks","ExtraHop Networks","[Deprecated] ExtraHop Reveal(x) via Legacy Agent","The ExtraHop Reveal(x) data connector enables you to easily connect your Reveal(x) system with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This integration gives you the ability to gain insight into your organization's network and improve your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29/Data%20Connectors/template_ExtraHopNetworks.json","true" -"CommonSecurityLog","ExtraHop Reveal(x)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29","extrahop","extrahop_revealx_mss","2022-05-19","","","ExtraHop","Partner","https://www.extrahop.com/support/","","domains","ExtraHopNetworksAma","ExtraHop Networks","[Deprecated] ExtraHop Reveal(x) via AMA","The ExtraHop Reveal(x) data connector enables you to easily connect your Reveal(x) system with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. 
This integration gives you the ability to gain insight into your organization's network and improve your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29/Data%20Connectors/template_ExtraHopReveal%28x%29AMA.json","true" -"F5Telemetry_ASM_CL","F5 BIG-IP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP","f5-networks","f5_bigip_mss","2022-05-25","","","F5 Networks","Partner","https://support.f5.com/csp/home","","domains","F5BigIp","F5 Networks","F5 BIG-IP","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP/Data%20Connectors/F5BigIp.json","true" -"F5Telemetry_LTM_CL","F5 BIG-IP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP","f5-networks","f5_bigip_mss","2022-05-25","","","F5 Networks","Partner","https://support.f5.com/csp/home","","domains","F5BigIp","F5 Networks","F5 BIG-IP","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP/Data%20Connectors/F5BigIp.json","true" -"F5Telemetry_system_CL","F5 BIG-IP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP","f5-networks","f5_bigip_mss","2022-05-25","","","F5 Networks","Partner","https://support.f5.com/csp/home","","domains","F5BigIp","F5 Networks","F5 BIG-IP","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP/Data%20Connectors/F5BigIp.json","true" -"CommonSecurityLog","F5 Networks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks","f5-networks","f5_networks_data_mss","2022-05-12","","","F5","Partner","https://www.f5.com/services/support","","domains","F5","F5 Networks","[Deprecated] F5 Networks via Legacy Agent","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks/Data%20Connectors/template_F5.json","true" -"CommonSecurityLog","F5 Networks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks","f5-networks","f5_networks_data_mss","2022-05-12","","","F5","Partner","https://www.f5.com/services/support","","domains","F5Ama","F5 Networks","[Deprecated] F5 Networks via AMA","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks/Data%20Connectors/template_F5NetworksAMA.json","true" -"","FalconFriday","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FalconFriday","falconforcebv1623147592118","falconfriday_content","2021-10-18","","","FalconForce","Partner","https://www.falconforce.nl/en/","","domains","","","","","","false" -"","Farsight DNSDB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Farsight%20DNSDB","","","","","","","","","","","","","","","","false" -"feedly_indicators_CL","Feedly","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Feedly","feedlyinc1693853810319","azure-sentinel-solution-feedly","2023-08-01","","","Feedly Inc","Partner","https://feedly.com/i/support/contactUs","","domains","Feedly","Feedly","Feedly","This connector allows you to ingest IoCs from Feedly.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Feedly/Data%20Connectors/Feedly_API_AzureFunctionApp.json","true" -"CommonSecurityLog","FireEye Network 
Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security","azuresentinel","azure-sentinel-solution-fireeyenx","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","FireEyeNX","FireEye","[Deprecated] FireEye Network Security (NX) via Legacy Agent","The [FireEye Network Security (NX)](https://www.fireeye.com/products/network-security.html) data connector provides the capability to ingest FireEye Network Security logs into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security/Data%20Connectors/Connector_FireEyeNX_CEF.json","true" -"CommonSecurityLog","FireEye Network Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security","azuresentinel","azure-sentinel-solution-fireeyenx","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","FireEyeNXAma","FireEye","[Deprecated] FireEye Network Security (NX) via AMA","The [FireEye Network Security (NX)](https://www.fireeye.com/products/network-security.html) data connector provides the capability to ingest FireEye Network Security logs into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security/Data%20Connectors/template_FireEyeNX_CEFAMA.json","true" -"Firework_CL","Flare","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Flare","flaresystmesinc1617114736428","flare-systems-firework-sentinel","2021-10-20","","","Flare","Partner","https://flare.io/company/contact/","","domains","Flare","Flare","Flare","[Flare](https://flare.systems/platform/) connector allows you to receive data and intelligence from Flare on Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Flare/Data%20Connectors/Connector_REST_API_FlareSystemsFirework.json","true" -"CommonSecurityLog","Forcepoint 
CASB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-casb","2022-05-19","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCasb","Forcepoint CASB","[Deprecated] Forcepoint CASB via Legacy Agent","The Forcepoint CASB (Cloud Access Security Broker) Connector allows you to automatically export CASB logs and events into Microsoft Sentinel in real-time. This enriches visibility into user activities across locations and cloud applications, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB/Data%20Connectors/Forcepoint%20CASB.json","true" -"CommonSecurityLog","Forcepoint CASB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-casb","2022-05-19","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCasbAma","Forcepoint CASB","[Deprecated] Forcepoint CASB via AMA","The Forcepoint CASB (Cloud Access Security Broker) Connector allows you to automatically export CASB logs and events into Microsoft Sentinel in real-time. 
This enriches visibility into user activities across locations and cloud applications, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB/Data%20Connectors/template_Forcepoint%20CASBAMA.json","true" -"CommonSecurityLog","Forcepoint CSG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-csg","2022-05-10","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCSG","Forcepoint","[Deprecated] Forcepoint CSG via Legacy Agent","Forcepoint Cloud Security Gateway is a converged cloud security service that provides visibility, control, and threat protection for users and data, wherever they are. For more information visit: https://www.forcepoint.com/product/cloud-security-gateway","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG/Data%20Connectors/ForcepointCloudSecurityGateway.json","true" -"CommonSecurityLog","Forcepoint CSG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-csg","2022-05-10","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCSGAma","Forcepoint","[Deprecated] Forcepoint CSG via AMA","Forcepoint Cloud Security Gateway is a converged cloud security service that provides visibility, control, and threat protection for users and data, wherever they are. 
For more information visit: https://www.forcepoint.com/product/cloud-security-gateway","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG/Data%20Connectors/template_ForcepointCloudSecurityGatewayAMA.json","true" -"ForcepointDLPEvents_CL","Forcepoint DLP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20DLP","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-dlp","2022-05-09","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","Forcepoint_DLP","Forcepoint","Forcepoint DLP","The Forcepoint DLP (Data Loss Prevention) connector allows you to automatically export DLP incident data from Forcepoint DLP into Microsoft Sentinel in real-time. This enriches visibility into user activities and data loss incidents, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20DLP/Data%20Connectors/Forcepoint%20DLP.json","true" -"CommonSecurityLog","Forcepoint NGFW","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-ngfw","2022-05-25","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointNgfw","Forcepoint","[Deprecated] Forcepoint NGFW via Legacy Agent","The Forcepoint NGFW (Next Generation Firewall) connector allows you to automatically export user-defined Forcepoint NGFW logs into Microsoft Sentinel in real-time. 
This enriches visibility into user activities recorded by NGFW, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW/Data%20Connectors/FORCEPOINT_NGFW.json","true" -"CommonSecurityLog","Forcepoint NGFW","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-ngfw","2022-05-25","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointNgfwAma","Forcepoint","[Deprecated] Forcepoint NGFW via AMA","The Forcepoint NGFW (Next Generation Firewall) connector allows you to automatically export user-defined Forcepoint NGFW logs into Microsoft Sentinel in real-time. This enriches visibility into user activities recorded by NGFW, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW/Data%20Connectors/template_FORCEPOINT_NGFWAMA.json","true" -"Syslog","Forescout (Legacy)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20%28Legacy%29","azuresentinel","azure-sentinel-solution-forescout","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Forescout","Forescout","Forescout","The [Forescout](https://www.forescout.com/) data connector provides the capability to ingest [Forescout events](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.How-to-Work-with-the-Syslog-Plugin.html) into Microsoft Sentinel. 
Refer to [Forescout documentation](https://docs.forescout.com/bundle/syslog-msg-3-6-tn/page/syslog-msg-3-6-tn.About-Syslog-Messages-in-Forescout.html) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20%28Legacy%29/Data%20Connectors/Forescout_syslog.json","true" -"ForescoutOtAlert_CL","Forescout eyeInspect for OT Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security","forescout","azure-sentinel-eyeinspectotsecurity","2025-07-10","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","Forescout_eyeInspect_for_OT_Security","Forescout","Forescout eyeInspect for OT Security","Forescout eyeInspect for OT Security connector allows you to connect Asset/Alert information from Forescout eyeInspect OT platform with Microsoft Sentinel, to view and analyze data using Log Analytics Tables and Workbooks. This gives you more insight into OT organization network and improves security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security/Data%20Connectors/Forescout%20eyeInspect%20for%20OT%20Security.json","true" -"ForescoutOtAsset_CL","Forescout eyeInspect for OT Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security","forescout","azure-sentinel-eyeinspectotsecurity","2025-07-10","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","Forescout_eyeInspect_for_OT_Security","Forescout","Forescout eyeInspect for OT Security","Forescout eyeInspect for OT Security connector allows you to connect Asset/Alert information from Forescout eyeInspect OT platform with Microsoft Sentinel, to view and analyze data using Log Analytics Tables and Workbooks. 
This gives you more insight into OT organization network and improves security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security/Data%20Connectors/Forescout%20eyeInspect%20for%20OT%20Security.json","true" -"ForescoutComplianceStatus_CL","ForescoutHostPropertyMonitor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor","forescout","azure-sentinel-solution-forescout","2022-06-28","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","ForescoutHostPropertyMonitor","Forescout","Forescout Host Property Monitor","The Forescout Host Property Monitor connector allows you to connect host/policy/compliance properties from Forescout platform with Microsoft Sentinel, to view, create custom incidents, and improve investigation. This gives you more insight into your organization network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor/Data%20Connectors/ForescoutHostPropertyMonitor.json","true" -"ForescoutHostProperties_CL","ForescoutHostPropertyMonitor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor","forescout","azure-sentinel-solution-forescout","2022-06-28","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","ForescoutHostPropertyMonitor","Forescout","Forescout Host Property Monitor","The Forescout Host Property Monitor connector allows you to connect host/policy/compliance properties from Forescout platform with Microsoft Sentinel, to view, create custom incidents, and improve investigation. 
This gives you more insight into your organization network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor/Data%20Connectors/ForescoutHostPropertyMonitor.json","true" -"ForescoutPolicyStatus_CL","ForescoutHostPropertyMonitor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor","forescout","azure-sentinel-solution-forescout","2022-06-28","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","ForescoutHostPropertyMonitor","Forescout","Forescout Host Property Monitor","The Forescout Host Property Monitor connector allows you to connect host/policy/compliance properties from Forescout platform with Microsoft Sentinel, to view, create custom incidents, and improve investigation. This gives you more insight into your organization network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor/Data%20Connectors/ForescoutHostPropertyMonitor.json","true" -"CommonSecurityLog","ForgeRock Common Audit for CEF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForgeRock%20Common%20Audit%20for%20CEF","publisherid_test","offerid_test","2022-05-04","","","Forgerock","Partner","https://www.forgerock.com/support","","domains","ForgeRock","ForgeRock Inc","[Deprecated] ForgeRock Identity Platform","The ForgeRock Identity Platform provides a single common auditing framework. Extract and aggregate log data across the entire platform with common audit (CAUD) event handlers and unique IDs so that it can be tracked holistically. 
Open and extensible, you can leverage audit logging and reporting capabilities for integration with Microsoft Sentinel via this CAUD for CEF connector.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForgeRock%20Common%20Audit%20for%20CEF/Data%20Connectors/ForgeRock_CEF.json","true" -"CommonSecurityLog","Fortinet FortiGate Next-Generation Firewall connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortinetfortigate","2021-08-13","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Fortinet","Fortinet","[Deprecated] Fortinet via Legacy Agent","The Fortinet firewall connector allows you to easily connect your Fortinet logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/Fortinet-FortiGate.json","true" -"CommonSecurityLog","Fortinet FortiGate Next-Generation Firewall connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortinetfortigate","2021-08-13","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","FortinetAma","Fortinet","[Deprecated] Fortinet via AMA","The Fortinet firewall connector allows you to easily connect your Fortinet logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/template_Fortinet-FortiGateAma.json","true" -"FncEventsDetections_CL","Fortinet FortiNDR Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud","fortinet","fortindrcloud-sentinel","2024-01-15","","","Fortinet","Partner","https://www.fortinet.com/support","","domains","FortinetFortiNdrCloudDataConnector","Fortinet","Fortinet FortiNDR Cloud","The Fortinet FortiNDR Cloud data connector provides the capability to ingest [Fortinet FortiNDR Cloud](https://docs.fortinet.com/product/fortindr-cloud) data into Microsoft Sentinel using the FortiNDR Cloud API","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud/Data%20Connectors/FortinetFortiNdrCloud_API_AzureFunctionApp.json","true" -"FncEventsObservation_CL","Fortinet FortiNDR Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud","fortinet","fortindrcloud-sentinel","2024-01-15","","","Fortinet","Partner","https://www.fortinet.com/support","","domains","FortinetFortiNdrCloudDataConnector","Fortinet","Fortinet FortiNDR Cloud","The Fortinet FortiNDR Cloud data connector provides the capability to ingest [Fortinet FortiNDR Cloud](https://docs.fortinet.com/product/fortindr-cloud) data into Microsoft Sentinel using the FortiNDR Cloud API","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud/Data%20Connectors/FortinetFortiNdrCloud_API_AzureFunctionApp.json","true" -"FncEventsSuricata_CL","Fortinet FortiNDR 
Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud","fortinet","fortindrcloud-sentinel","2024-01-15","","","Fortinet","Partner","https://www.fortinet.com/support","","domains","FortinetFortiNdrCloudDataConnector","Fortinet","Fortinet FortiNDR Cloud","The Fortinet FortiNDR Cloud data connector provides the capability to ingest [Fortinet FortiNDR Cloud](https://docs.fortinet.com/product/fortindr-cloud) data into Microsoft Sentinel using the FortiNDR Cloud API","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud/Data%20Connectors/FortinetFortiNdrCloud_API_AzureFunctionApp.json","true" -"CommonSecurityLog","Fortinet FortiWeb Cloud WAF-as-a-Service connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortiwebcloud","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","FortinetFortiWeb","Microsoft","[Deprecated] Fortinet FortiWeb Web Application Firewall via Legacy Agent","The [fortiweb](https://www.fortinet.com/products/web-application-firewall/fortiweb) data connector provides the capability to ingest Threat Analytics and events into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/Fortiweb.json","true" -"CommonSecurityLog","Fortinet FortiWeb Cloud WAF-as-a-Service connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortiwebcloud","2022-05-23","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com/","","domains","FortinetFortiWebAma","Microsoft","Fortinet FortiWeb Web Application Firewall via AMA","The [fortiweb](https://www.fortinet.com/products/web-application-firewall/fortiweb) data connector provides the capability to ingest Threat Analytics and events into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/template_FortiwebAma.json","true" -"","GDPR Compliance & Data Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GDPR%20Compliance%20%26%20Data%20Security","azuresentinel","azure-sentinel-solution-gdpr-compliance","2025-10-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"Garrison_ULTRARemoteLogs_CL","Garrison ULTRA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Garrison%20ULTRA","garrisontechnologyltd1725375696148","microsoft-sentinel-solution-garrison-ultra","2024-10-04","","","Garrison","Partner","https://support.ultra.garrison.com","","domains","GarrisonULTRARemoteLogs","Garrison","Garrison ULTRA Remote Logs","The [Garrison ULTRA](https://www.garrison.com/en/garrison-ultra-cloud-platform) Remote Logs connector allows you to ingest Garrison ULTRA Remote Logs into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Garrison%20ULTRA/Data%20Connectors/GarrisonULTRARemoteLogs/GarrisonULTRARemoteLogs_ConnectorUI.json","true" -"Gigamon_CL","Gigamon Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Gigamon%20Connector","gigamon-inc","microsoft-sentinel-solution-gigamon","","","","Gigamon","Partner","https://www.gigamon.com/","","domains","GigamonDataConnector","Gigamon","Gigamon AMX Data Connector","Use this data connector to integrate with Gigamon Application Metadata Exporter (AMX) and get data sent 
directly to Microsoft Sentinel. ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Gigamon%20Connector/Data%20Connectors/Connector_Analytics_Gigamon.json","true" -"GitHubAuditLogsV2_CL","GitHub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub","microsoftcorporation1622712991604","sentinel4github","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitHubAuditDefinitionV2","Microsoft","GitHub Enterprise Audit Log (via Codeless Connector Framework) (Preview)","The GitHub audit log connector provides the capability to ingest GitHub logs into Microsoft Sentinel. By connecting GitHub audit logs into Microsoft Sentinel, you can view this data in workbooks, use it to create custom alerts, and improve your investigation process.

**Note:** If you intended to ingest GitHub subscribed events into Microsoft Sentinel, please refer to GitHub (using Webhooks) Connector from ""**Data Connectors**"" gallery.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub/Data%20Connectors/GitHubAuditLogs_CCF/GitHubAuditLogs_ConnectorDefinition.json","true" -"GitHubAuditLogPolling_CL","GitHub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub","microsoftcorporation1622712991604","sentinel4github","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitHubEcAuditLogPolling","GitHub","[Deprecated] GitHub Enterprise Audit Log","The GitHub audit log connector provides the capability to ingest GitHub logs into Microsoft Sentinel. By connecting GitHub audit logs into Microsoft Sentinel, you can view this data in workbooks, use it to create custom alerts, and improve your investigation process.

**Note:** If you intended to ingest GitHub subscribed events into Microsoft Sentinel, please refer to GitHub (using Webhooks) Connector from ""**Data Connectors**"" gallery.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub/Data%20Connectors/azuredeploy_GitHub_native_poller_connector.json","true" -"githubscanaudit_CL","GitHub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub","microsoftcorporation1622712991604","sentinel4github","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitHubWebhook","Microsoft","GitHub (using Webhooks)","The [GitHub](https://www.github.com) webhook data connector provides the capability to ingest GitHub subscribed events into Microsoft Sentinel using [GitHub webhook events](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads). The connector provides ability to get events into Microsoft Sentinel which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

**Note:** If you are intended to ingest Github Audit logs, Please refer to GitHub Enterprise Audit Log Connector from ""**Data Connectors**"" gallery.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub/Data%20Connectors/GithubWebhook/GithubWebhook_API_FunctionApp.json","true" -"Syslog","GitLab","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitLab","azuresentinel","azure-sentinel-solution-gitlab","2022-04-27","2022-06-27","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitLab","Microsoft","[Deprecated] GitLab","The [GitLab](https://about.gitlab.com/solutions/devops-platform/) connector allows you to easily connect your GitLab (GitLab Enterprise Edition - Standalone) logs with Microsoft Sentinel. This gives you more security insight into your organization's DevOps pipelines.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitLab/Data%20Connectors/Connector_Syslog_GitLab.json","true" -"","Global Secure Access","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Global%20Secure%20Access","azuresentinel","azure-sentinel-solution-globalsecureaccess","2024-04-08","","","Microsoft Corporation","Microsoft","https://learn.microsoft.com/en-us/entra/global-secure-access/overview-what-is-global-secure-access","","domains","","","","","","false" -"ApigeeX_CL","Google Apigee","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee","azuresentinel","azure-sentinel-solution-googleapigeex","2021-10-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ApigeeXDataConnector","Google","[DEPRECATED] Google ApigeeX","The [Google ApigeeX](https://cloud.google.com/apigee/docs) data connector provides the capability to ingest ApigeeX audit logs into Microsoft Sentinel using the GCP Logging API. Refer to [GCP Logging API documentation](https://cloud.google.com/logging/docs/reference/v2/rest) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/ApigeeX_FunctionApp.json","true" -"GCPApigee","Google Apigee","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee","azuresentinel","azure-sentinel-solution-googleapigeex","2021-10-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GoogleApigeeXLogsCCPDefinition","Microsoft","Google ApigeeX (via Codeless Connector Framework)","The Google ApigeeX data connector provides the capability to ingest Audit logs into Microsoft Sentinel using the Google Apigee API. Refer to [Google Apigee API](https://cloud.google.com/apigee/docs/reference/apis/apigee/rest/?apix=true) documentation for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/GoogleApigeeXLog_CCP/GoogleApigeeXLog_ConnectorDefinition.json","true" -"GCPAuditLogs","Google Cloud Platform Audit Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Audit%20Logs","azuresentinel","azure-sentinel-solution-gcpauditlogs-api","2023-03-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPAuditLogsDefinition","Microsoft","GCP Pub/Sub Audit Logs","The Google Cloud Platform (GCP) audit logs, ingested from Microsoft Sentinel's connector, enables you to capture three types of audit logs: admin activity logs, data access logs, and access transparency logs. 
Google cloud audit logs record a trail that practitioners can use to monitor access and detect potential threats across Google Cloud Platform (GCP) resources.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Audit%20Logs/Data%20Connectors/GCPAuditLogs_ccp/data_connector_definition.json","true" -"","Google Cloud Platform BigQuery","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20BigQuery","azuresentinel","azure-sentinel-solution-gcpbigquery","2023-03-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"GCPMonitoring","Google Cloud Platform Cloud Monitoring","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring","azuresentinel","azure-sentinel-solution-gcpmonitoring","2022-07-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPMonitorCCPDefinition","Microsoft","Google Cloud Platform Cloud Monitoring (via Codeless Connector Framework)","The Google Cloud Platform Cloud Monitoring data connector ingests Monitoring logs from Google Cloud into Microsoft Sentinel using the Google Cloud Monitoring API. 
Refer to [Cloud Monitoring API](https://cloud.google.com/monitoring/api/v3) documentation for more details.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring/Data%20Connectors/GCPMonitoringLogs_CCP/GCPCloudMonitoringLogs_ConnectorDefinition.json","true" -"GCP_MONITORING_CL","Google Cloud Platform Cloud Monitoring","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring","azuresentinel","azure-sentinel-solution-gcpmonitoring","2022-07-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPMonitorDataConnector","Google","[DEPRECATED] Google Cloud Platform Cloud Monitoring","The Google Cloud Platform Cloud Monitoring data connector provides the capability to ingest [GCP Monitoring metrics](https://cloud.google.com/monitoring/api/metrics_gcp) into Microsoft Sentinel using the GCP Monitoring API. Refer to [GCP Monitoring API documentation](https://cloud.google.com/monitoring/api/v3) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring/Data%20Connectors/GCP_Monitor_API_FunctionApp.json","true" -"GCPCloudRun","Google Cloud Platform Cloud Run","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run","azuresentinel","azure-sentinel-solution-gcpcloudrun","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPCloudRunLogs_ConnectorDefinition","Microsoft","GCP Cloud Run (via Codeless Connector Framework)","The GCP Cloud Run data connector provides the capability to ingest Cloud Run request logs into Microsoft Sentinel using Pub/Sub. Refer the [Cloud Run Overview](https://cloud.google.com/run/docs/logging) for more details.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run/Data%20Connectors/GCPCloudRunLog_CCF/GCPCloudRunLogs_ConnectorDefinition.json","true" -"GCPComputeEngine","Google Cloud Platform Compute Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine","azuresentinel","azure-sentinel-solution-gcpcomputeengine","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPComputeEngineLogsCCPDefinition","Microsoft","Google Cloud Platform Compute Engine (via Codeless Connector Framework)","The Google Cloud Platform Compute Engine data connector provides the capability to ingest Compute Engine Audit logs into Microsoft Sentinel using the Google Cloud Compute Engine API. 
Refer to [Cloud Compute Engine API](https://cloud.google.com/compute/docs/reference/rest/v1) documentation for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine/Data%20Connectors/GCPComputeEngineLog_CCP/GCPComputeEngineLog_ConnectorDefinition.json","true" -"GCPFirewallLogs","Google Cloud Platform Firewall Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Firewall%20Logs","azuresentinel","azure-sentinel-solution-gcpfirewalllogs-api","2024-11-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPFirewallLogsCCPDefinition","Microsoft","GCP Pub/Sub Firewall Logs","The Google Cloud Platform (GCP) firewall logs, enable you to capture network inbound and outbound activity to monitor access and detect potential threats across Google Cloud Platform (GCP) resources.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Firewall%20Logs/Data%20Connectors/GCPFirewallLogs_ccp/GCP_ConnectorDefinition.json","true" -"GCPLoadBalancerLogs_CL","Google Cloud Platform Load Balancer Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Load%20Balancer%20Logs","azuresentinel","azure-sentinel-solution-gcploadbalancerlogs-api","2025-02-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPFLoadBalancerLogsCCPDefinition","Microsoft","GCP Pub/Sub Load Balancer Logs (via Codeless Connector Platform).","Google Cloud Platform (GCP) Load Balancer logs provide detailed insights into network traffic, capturing both inbound and outbound activities. These logs are used for monitoring access patterns and identifying potential security threats across GCP resources. 
Additionally, these logs also include GCP Web Application Firewall (WAF) logs, enhancing the ability to detect and mitigate risks effectively.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Load%20Balancer%20Logs/Data%20Connectors/GCPFLoadBalancerLogs_GCP_CCP/GCPFLoadBalancerLogs_Definition.json","true" -"GoogleCloudSCC","Google Cloud Platform Security Command Center","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Security%20Command%20Center","azuresentinel","azure-sentinel-solution-gcpscclogs-api","2023-09-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GoogleSCCDefinition","Microsoft","Google Security Command Center","The Google Cloud Platform (GCP) Security Command Center is a comprehensive security and risk management platform for Google Cloud, ingested from Sentinel's connector. It offers features such as asset inventory and discovery, vulnerability and threat detection, and risk mitigation and remediation to help you gain insight into your organization's security and data attack surface. 
This integration enables you to perform tasks related to findings and assets more effectively.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Security%20Command%20Center/Data%20Connectors/GCPSecurityCommandCenter.json","true" -"GCPVPCFlow","Google Cloud Platform VPC Flow Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20VPC%20Flow%20Logs","azuresentinel","azure-sentinel-solution-gcpvpcflowlogs-api","2025-02-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPVPCFlowLogsCCPDefinition","Microsoft","GCP Pub/Sub VPC Flow Logs (via Codeless Connector Framework)","The Google Cloud Platform (GCP) VPC Flow Logs enable you to capture network traffic activity at the VPC level, allowing you to monitor access patterns, analyze network performance, and detect potential threats across GCP resources.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20VPC%20Flow%20Logs/Data%20Connectors/GCPVPCFlowLogs_GCP_CCP/GCPVPCFlowLogs_ConnectorDefinition.json","true" -"GKEAPIServer","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" -"GKEApplication","Google Kubernetes 
Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" -"GKEAudit","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" -"GKEControllerManager","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector 
Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" -"GKEHPADecision","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" -"GKEScheduler","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE 
clusters.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" -"","Google Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Threat%20Intelligence","virustotalsl1681486227461","azure-sentinel-solution-google","2024-10-26","2024-10-26","","Google","Partner","https://www.virustotal.com/gui/contact-us","","domains","","","","","","false" -"GCPCDN","GoogleCloudPlatformCDN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformCDN","azuresentinel","azure-sentinel-solution-gcp-cdn","2025-03-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPCDNLogsCCPDefinition","Microsoft","Google Cloud Platform CDN (via Codeless Connector Framework)","The Google Cloud Platform CDN data connector provides the capability to ingest Cloud CDN Audit logs and Cloud CDN Traffic logs into Microsoft Sentinel using the Compute Engine API. Refer the [Product overview](https://cloud.google.com/cdn/docs/overview) document for more details.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformCDN/Data%20Connectors/GCPCDNLogs_ccp/GCPCDNLogs_ConnectorDefinition.json","true" -"GCP_DNS_CL","GoogleCloudPlatformDNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS","azuresentinel","azure-sentinel-solution-gcpdns","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPDNSDataConnector","Google","[DEPRECATED] Google Cloud Platform DNS","The Google Cloud Platform DNS data connector provides the capability to ingest [Cloud DNS query logs](https://cloud.google.com/dns/docs/monitoring#using_logging) and [Cloud DNS audit logs](https://cloud.google.com/dns/docs/audit-logging) into Microsoft Sentinel using the GCP Logging API. 
Refer to [GCP Logging API documentation](https://cloud.google.com/logging/docs/api) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS/Data%20Connectors/GCP_DNS_API_FunctionApp.json","true" -"GCPDNS","GoogleCloudPlatformDNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS","azuresentinel","azure-sentinel-solution-gcpdns","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPDNSLogsCCPDefinition","Microsoft","Google Cloud Platform DNS (via Codeless Connector Framework)","The Google Cloud Platform DNS data connector provides the capability to ingest Cloud DNS Query logs and Cloud DNS Audit logs into Microsoft Sentinel using the Google Cloud DNS API. Refer to [Cloud DNS API](https://cloud.google.com/dns/docs/reference/rest/v1) documentation for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS/Data%20Connectors/GCPDNSLog_CCP/GCPDNSLog_ConnectorDefinition.json","true" -"GCPIAM","GoogleCloudPlatformIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM","azuresentinel","azure-sentinel-solution-gcpiam","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPIAMCCPDefinition","Microsoft","Google Cloud Platform IAM (via Codeless Connector Framework)","The Google Cloud Platform IAM data connector provides the capability to ingest the Audit logs relating to Identity and Access Management (IAM) activities within Google Cloud into Microsoft Sentinel using the Google IAM API. 
Refer to [GCP IAM API](https://cloud.google.com/iam/docs/reference/rest) documentation for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM/Data%20Connectors/GCPIAMLog_CCP/GCPIAMLog_ConnectorDefinition.json","true" -"GCP_IAM_CL","GoogleCloudPlatformIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM","azuresentinel","azure-sentinel-solution-gcpiam","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPIAMDataConnector","Google","[DEPRECATED] Google Cloud Platform IAM","The Google Cloud Platform Identity and Access Management (IAM) data connector provides the capability to ingest [GCP IAM logs](https://cloud.google.com/iam/docs/audit-logging) into Microsoft Sentinel using the GCP Logging API. Refer to [GCP Logging API documentation](https://cloud.google.com/logging/docs/api) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM/Data%20Connectors/GCP_IAM_API_FunctionApp.json","true" -"GCPIDS","GoogleCloudPlatformIDS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIDS","azuresentinel","azure-sentinel-solution-gcpids","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPCLOUDIDSLogsCCPDefinition","Microsoft","Google Cloud Platform Cloud IDS (via Codeless Connector Framework)","The Google Cloud Platform IDS data connector provides the capability to ingest Cloud IDS Traffic logs, Threat logs and Audit logs into Microsoft Sentinel using the Google Cloud IDS API. Refer to [Cloud IDS API](https://cloud.google.com/intrusion-detection-system/docs/audit-logging#google.cloud.ids.v1.IDS) documentation for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIDS/Data%20Connectors/GCPCloudIDSLog_CCP/GCPCloudIDSLog_ConnectorDefinition.json","true" -"GCPNAT","GoogleCloudPlatformNAT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT","azuresentinel","azure-sentinel-solution-gcp-nat","2025-05-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPNATLogsCCPDefinition","Microsoft","Google Cloud Platform NAT (via Codeless Connector Framework)","The Google Cloud Platform NAT data connector provides the capability to ingest Cloud NAT Audit logs and Cloud NAT Traffic logs into Microsoft Sentinel using the Compute Engine API. 
Refer the [Product overview](https://cloud.google.com/nat/docs/overview) document for more details.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/GCPNATLogs_ccp/GCPNATLogs_ConnectorDefinition.json","true" -"GCPNATAudit","GoogleCloudPlatformNAT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT","azuresentinel","azure-sentinel-solution-gcp-nat","2025-05-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPNATLogsCCPDefinition","Microsoft","Google Cloud Platform NAT (via Codeless Connector Framework)","The Google Cloud Platform NAT data connector provides the capability to ingest Cloud NAT Audit logs and Cloud NAT Traffic logs into Microsoft Sentinel using the Compute Engine API. Refer the [Product overview](https://cloud.google.com/nat/docs/overview) document for more details.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/GCPNATLogs_ccp/GCPNATLogs_ConnectorDefinition.json","true" -"GCPResourceManager","GoogleCloudPlatformResourceManager","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformResourceManager","azuresentinel","azure-sentinel-solution-gcp-rm","2025-03-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPResourceManagerLogsCCFDefinition","Microsoft","Google Cloud Platform Resource Manager (via Codeless Connector Framework)","The Google Cloud Platform Resource Manager data connector provides the capability to ingest Resource Manager [Admin Activity and Data Access Audit logs](https://cloud.google.com/resource-manager/docs/audit-logging) into Microsoft Sentinel using the Cloud Resource Manager API. 
Refer the [Product overview](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy) document for more details.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformResourceManager/Data%20Connectors/GCPResourceManagerAuditLogs_ccf/GCPResourceManagerAuditLogs_ConnectorDefinition.json","true" -"GCPCloudSQL","GoogleCloudPlatformSQL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL","azuresentinel","azure-sentinel-solution-gcpsql","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPCloudSQLCCFDefinition","Microsoft","GCP Cloud SQL (via Codeless Connector Framework)","The GCP Cloud SQL data connector provides the capability to ingest Audit logs into Microsoft Sentinel using the GCP Cloud SQL API. Refer to [GCP cloud SQL Audit Logs](https://cloud.google.com/sql/docs/mysql/audit-logging) documentation for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL/Data%20Connectors/GCPCloudSQLLog_CCF/GCPCloudSQLLog_ConnectorDefinition.json","true" -"","GoogleDirectory","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleDirectory","","","","","","","","","","","","","","","","false" -"GoogleWorkspaceReports","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceCCPDefinition","Microsoft","Google Workspace Activities (via Codeless Connector Framework)","The [Google Workspace](https://workspace.google.com/) Activities data connector provides the capability to ingest Activity Events from [Google Workspace API](https://developers.google.com/admin-sdk/reports/reference/rest/v1/activities/list) into Microsoft 
Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GoogleWorkspaceTemplate_ccp/GoogleWorkspaceReports_DataConnectorDefinition.json","true" -"GWorkspace_ReportsAPI_access_transparency_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_admin_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_calendar_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_chat_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_chrome_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_context_aware_access_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_data_studio_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_drive_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_gcp_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_gplus_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_groups_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_groups_enterprise_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_jamboard_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_keep_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_login_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_meet_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_mobile_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_rules_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_saml_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_token_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_user_accounts_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GoogleWorkspaceReports_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"ThreatIntelligenceIndicator","GreyNoiseThreatIntelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GreyNoiseThreatIntelligence","greynoiseintelligenceinc1681236078693","microsoft-sentinel-byol-greynoise","2023-09-05","2025-07-28","","GreyNoise","Partner","https://www.greynoise.io/contact/general","","domains","GreyNoise2SentinelAPI","GreyNoise, Inc. and BlueCycle LLC","GreyNoise Threat Intelligence","This Data Connector installs an Azure Function app to download GreyNoise indicators once per day and inserts them into the ThreatIntelligenceIndicator table in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GreyNoiseThreatIntelligence/Data%20Connectors/GreyNoiseConnector_UploadIndicatorsAPI.json","true" -"","Group-IB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Group-IB","","","","","","","","","","","","","","","","false" -"","HIPAA Compliance","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HIPAA%20Compliance","azuresentinel","azure-sentinel-solution-hipaacompliance","2025-10-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"","HYAS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HYAS","hyas","a-hyas-insight-azure-sentinel-solutions-gallery","2021-10-20","","","HYAS","Partner","https://www.hyas.com/contact","","domains","","","","","","false" -"HYASProtectDnsSecurityLogs_CL","HYAS Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HYAS%20Protect","hyas","microsoft-sentinel-solution-hyas-protect","2023-09-26","","","HYAS","Partner","https://www.hyas.com/contact","","domains","HYASProtect","HYAS","HYAS Protect","HYAS Protect provide logs based on reputation values - Blocked, Malicious, Permitted, 
Suspicious.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HYAS%20Protect/Data%20Connectors/HYASProtect_FunctionApp.json","true" -"net_assets_CL","HolmSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity","holmsecurityswedenab1639511288603","holmsecurity_sc_sentinel","2022-07-18","","","Holm Security","Partner","https://support.holmsecurity.com/","","domains","HolmSecurityAssets","Holm Security","Holm Security Asset Data","The connector provides the capability to poll data from Holm Security Center into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity/Data%20Connectors/HolmSecurityAssets_API_FunctionApp.json","true" -"web_assets_CL","HolmSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity","holmsecurityswedenab1639511288603","holmsecurity_sc_sentinel","2022-07-18","","","Holm Security","Partner","https://support.holmsecurity.com/","","domains","HolmSecurityAssets","Holm Security","Holm Security Asset Data","The connector provides the capability to poll data from Holm Security Center into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity/Data%20Connectors/HolmSecurityAssets_API_FunctionApp.json","true" -"","HoneyTokens","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HoneyTokens","","","","","","","","","","","","","","","","false" -"CyberpionActionItems_CL","IONIX","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IONIX","cyberpion1597832716616","cyberpion_mss","2022-05-02","","","IONIX","Partner","https://www.ionix.io/contact-us/","","domains","CyberpionSecurityLogs","IONIX","IONIX Security Logs","The IONIX Security Logs data connector, ingests logs from the IONIX system directly into Sentinel. 
The connector allows users to visualize their data, create alerts and incidents and improve security investigations.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IONIX/Data%20Connectors/IONIXSecurityLogs.json","true" -"","IPQualityScore","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPQualityScore","ipqualityscorellc1632794263588","ipqs_1","2021-10-20","","","IPQS Plugins Team","Partner","https://www.ipqualityscore.com/contact-us","","domains","","","","","","false" -"Ipinfo_ASN_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoASNDataConnector","IPinfo","IPinfo ASN Data Connector","This IPinfo data connector installs an Azure Function app to download standard_ASN datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/ASN/IPinfo_ASN_API_AzureFunctionApp.json","true" -"Ipinfo_Abuse_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoAbuseDataConnector","IPinfo","IPinfo Abuse Data Connector","This IPinfo data connector installs an Azure Function app to download standard_abuse datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Abuse/IPinfo_Abuse_API_AzureFunctionApp.json","true" 
-"Ipinfo_Carrier_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoCarrierDataConnector","IPinfo","IPinfo Carrier Data Connector","This IPinfo data connector installs an Azure Function app to download standard_carrier datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Carrier/IPinfo_Carrier_API_AzureFunctionApp.json","true" -"Ipinfo_Company_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoCompanyDataConnector","IPinfo","IPinfo Company Data Connector","This IPinfo data connector installs an Azure Function app to download standard_company datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Company/IPinfo_Company_API_AzureFunctionApp.json","true" -"Ipinfo_Country_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoCountryDataConnector","IPinfo","IPinfo Country ASN Data Connector","This IPinfo data connector installs an Azure Function app to download country_asn datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Country%20ASN/IPinfo_Country_API_AzureFunctionApp.json","true" 
-"Ipinfo_Domain_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoDomainDataConnector","IPinfo","IPinfo Domain Data Connector","This IPinfo data connector installs an Azure Function app to download standard_domain datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Domain/IPinfo_Domain_API_AzureFunctionApp.json","true" -"Ipinfo_Location_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoIplocationDataConnector","IPinfo","IPinfo Iplocation Data Connector","This IPinfo data connector installs an Azure Function app to download standard_location datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Iplocation/IPinfo_Iplocation_API_AzureFunctionApp.json","true" -"Ipinfo_Location_extended_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoIplocationExtendedDataConnector","IPinfo","IPinfo Iplocation Extended Data Connector","This IPinfo data connector installs an Azure Function app to download standard_location_extended datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Iplocation%20Extended/IPinfo_Iplocation_Extended_API_AzureFunctionApp.json","true" 
-"Ipinfo_Privacy_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoPrivacyDataConnector","IPinfo","IPinfo Privacy Data Connector","This IPinfo data connector installs an Azure Function app to download standard_privacy datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Privacy/IPinfo_Privacy_API_AzureFunctionApp.json","true" -"Ipinfo_Privacy_extended_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoPrivacyExtendedDataConnector","IPinfo","IPinfo Privacy Extended Data Connector","This IPinfo data connector installs an Azure Function app to download standard_privacy datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Privacy%20Extended/IPinfo_Privacy_Extended_API_AzureFunctionApp.json","true" -"Ipinfo_RIRWHOIS_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoRIRWHOISDataConnector","IPinfo","IPinfo RIRWHOIS Data Connector","This IPinfo data connector installs an Azure Function app to download RIRWHOIS datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/RIRWHOIS/IPinfo_RIRWHOIS_API_AzureFunctionApp.json","true" 
-"Ipinfo_RWHOIS_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoRWHOISDataConnector","IPinfo","IPinfo RWHOIS Data Connector","This IPinfo data connector installs an Azure Function app to download RWHOIS datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/RWHOIS/IPinfo_RWHOIS_API_AzureFunctionApp.json","true" -"Ipinfo_WHOIS_ASN_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISASNDataConnector","IPinfo","IPinfo WHOIS ASN Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_ASN datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20ASN/IPinfo_WHOIS_ASN_API_AzureFunctionApp.json","true" -"Ipinfo_WHOIS_MNT_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISMNTDataConnector","IPinfo","IPinfo WHOIS MNT Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_MNT datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20MNT/IPinfo_WHOIS_MNT_API_AzureFunctionApp.json","true" 
-"Ipinfo_WHOIS_NET_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISNETDataConnector","IPinfo","IPinfo WHOIS NET Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_NET datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20NET/IPinfo_WHOIS_NET_API_AzureFunctionApp.json","true" -"Ipinfo_WHOIS_ORG_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISORGDataConnector","IPinfo","IPinfo WHOIS ORG Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_ORG datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20ORG/IPinfo_WHOIS_ORG_API_AzureFunctionApp.json","true" -"Ipinfo_WHOIS_POC_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISPOCDataConnector","IPinfo","IPinfo WHOIS POC Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_POC datasets and insert it into custom log table in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20POC/IPinfo_WHOIS_POC_API_AzureFunctionApp.json","true" -"Syslog","ISC 
Bind","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ISC%20Bind","azuresentinel","azure-sentinel-solution-iscbind","2022-09-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ISCBind","ISC","[Deprecated] ISC Bind","The [ISC Bind](https://www.isc.org/bind/) connector allows you to easily connect your ISC Bind logs with Microsoft Sentinel. This gives you more insight into your organization's network traffic data, DNS query data, traffic statistics and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ISC%20Bind/Data%20Connectors/Connector_Syslog_ISCBind.json","true" -"CommonSecurityLog","Illumio Core","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core","azuresentinel","azure-sentinel-solution-illumiocore","2022-05-26","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","IllumioCore","Illumio","[Deprecated] Illumio Core via Legacy Agent","The [Illumio Core](https://www.illumio.com/products/) data connector provides the capability to ingest Illumio Core logs into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core/Data%20Connectors/Connector_IllumioCore_CEF.json","true" -"CommonSecurityLog","Illumio Core","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core","azuresentinel","azure-sentinel-solution-illumiocore","2022-05-26","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","IllumioCoreAma","Illumio","[Deprecated] Illumio Core via AMA","The [Illumio Core](https://www.illumio.com/products/) data connector provides the capability to ingest Illumio Core logs into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core/Data%20Connectors/template_IllumioCoreAMA.json","true" -"IllumioInsights_CL","Illumio 
Insight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight","illumioinc1629822633689","azure-sentinel-solution-illumioinsight","2025-08-10","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioInsightsDefinition","Microsoft","Illumio Insights","Illumio Insights Connector sends workload and security graph data from Illumio Insights into the Azure Microsoft Sentinel Data Lake, providing deep context for threat detection, lateral movement analysis, and real-time investigation.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight/Data%20Connectors/IllumioInsight_CCP/IllumioInsight_Definition.json","true" -"IllumioInsightsSummary_CL","Illumio Insight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight","illumioinc1629822633689","azure-sentinel-solution-illumioinsight","2025-08-10","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioInsightsSummaryCCP","Illumio","Illumio Insights Summary","The Illumio Insights Summary connector Publishes AI-powered threat discovery and anomaly reports generated by the Illumio Insights Agent. Leveraging the MITRE ATT&CK framework, these reports surface high-fidelity insights into emerging threats and risky behaviors, directly into the Data Lake.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight/Data%20Connectors/IllumioInsightsSummaryConnector_CCP/IllumioInsightsSummary_ConnectorDefinition.json","true" -"Illumio_Auditable_Events_CL","IllumioSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS","illumioinc1629822633689","illumio_sentinel","2024-05-13","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioSaaSDataConnector","Illumio","Illumio SaaS","[Illumio](https://www.illumio.com/) connector provides the capability to ingest events into Microsoft Sentinel. 
The connector provides ability to ingest auditable and flow events from AWS S3 bucket.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaaS_FunctionApp.json","true" -"Illumio_Flow_Events_CL","IllumioSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS","illumioinc1629822633689","illumio_sentinel","2024-05-13","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioSaaSDataConnector","Illumio","Illumio SaaS","[Illumio](https://www.illumio.com/) connector provides the capability to ingest events into Microsoft Sentinel. The connector provides ability to ingest auditable and flow events from AWS S3 bucket.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaaS_FunctionApp.json","true" -"IllumioFlowEventsV2_CL","IllumioSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS","illumioinc1629822633689","illumio_sentinel","2024-05-13","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioSaasCCFDefinition","Microsoft","Illumio Saas","The Illumio Saas Cloud data connector provides the capability to ingest Flow logs into Microsoft Sentinel using the Illumio Saas Log Integration through AWS S3 Bucket. 
Refer to [Illumio Saas Log Integration](https://product-docs-repo.illumio.com/Tech-Docs/CloudSecure/out/en/administer-cloudsecure/connector.html#UUID-c14edaab-9726-1f23-9c4c-bc2937be39ee_section-idm234556433515698) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaasLogs_ccf/IllumioSaasLogs_ConnectorDefinition.json","true" -"","Illusive Active Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Active%20Defense","","","","","","","","","","","","","","","","false" -"CommonSecurityLog","Illusive Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform","illusivenetworks","illusive_platform_mss","2022-05-25","","","Illusive Networks","Partner","https://illusive.com/support","","domains","illusiveAttackManagementSystem","illusive","[Deprecated] Illusive Platform via Legacy Agent","The Illusive Platform Connector allows you to share Illusive's attack surface analysis data and incident logs with Microsoft Sentinel and view this information in dedicated dashboards that offer insight into your organization's attack surface risk (ASM Dashboard) and track unauthorized lateral movement in your organization's network (ADS Dashboard).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform/Data%20Connectors/illusive%20Attack%20Management%20System.json","true" -"CommonSecurityLog","Illusive Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform","illusivenetworks","illusive_platform_mss","2022-05-25","","","Illusive Networks","Partner","https://illusive.com/support","","domains","illusiveAttackManagementSystemAma","illusive","[Deprecated] Illusive Platform via AMA","The Illusive Platform Connector allows you to share Illusive's attack surface analysis data and incident logs with Microsoft Sentinel and view this information in dedicated dashboards that offer insight into 
your organization's attack surface risk (ASM Dashboard) and track unauthorized lateral movement in your organization's network (ADS Dashboard).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform/Data%20Connectors/template_IllusivePlatformAMA.json","true" -"","Images","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Images","","","","","","","","","","","","","","","","false" -"CommonSecurityLog","Imperva WAF Gateway","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Imperva%20WAF%20Gateway","imperva","Imperva_WAF_Gateway_MSS","2022-05-02","","","Imperva","Partner","https://www.imperva.com/support/technical-support/","","domains","ImpervaWAFGateway","Imperva","Imperva WAF Gateway","The [Imperva](https://www.imperva.com) connector will allow you to quickly connect your Imperva WAF Gateway alerts to Azure Sentinel. This provides you additional insight into your organization's WAF traffic and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Imperva%20WAF%20Gateway/Data%20Connectors/Connector_Imperva_WAF_Gateway.json","true" -"ImpervaWAFCloudV2_CL","ImpervaCloudWAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF","azuresentinel","azure-sentinel-solution-impervawafcloud","2021-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ImpervaCloudWAFLogsCCFDefinition","Microsoft","Imperva Cloud WAF","The Imperva WAF Cloud data connector provides the capability to ingest logs into Microsoft Sentinel using the Imperva Log Integration through AWS S3 Bucket. 
Refer to [Imperva WAF Cloud Log Integration](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF/Data%20Connectors/ImpervaCloudWAFLogs_ccf/ImpervaCloudWAFLogs_ConnectorDefinition.json","true" -"ImpervaWAFCloud_CL","ImpervaCloudWAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF","azuresentinel","azure-sentinel-solution-impervawafcloud","2021-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ImpervaWAFCloudAPI","Imperva","Imperva Cloud WAF","The [Imperva Cloud WAF](https://www.imperva.com/resources/resource-library/datasheets/imperva-cloud-waf/) data connector provides the capability to integrate and ingest Web Application Firewall events into Microsoft Sentinel through the REST API. Refer to Log integration [documentation](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Download) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF/Data%20Connectors/ImpervaWAFCloud_FunctionApp.json","true" -"CommonSecurityLog","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxCloudDataConnectorAma","Infoblox","[Recommended] Infoblox Cloud Data Connector via AMA","The Infoblox Cloud Data Connector allows you to easily connect your Infoblox data with Microsoft Sentinel. 
By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCEFDataConnector/template_InfobloxCloudDataConnectorAma.JSON","true" -"Failed_Range_To_Ingest_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"Infoblox_Failed_Indicators_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. 
By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_atp_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_atp_threat_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. 
By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_dns_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_geo_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. 
By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_infoblox_web_cat_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_inforank_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. 
By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_malware_analysis_v3_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_nameserver_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. 
By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_nameserver_matches_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_ptr_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. 
By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_rpz_feeds_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_rpz_feeds_records_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. 
By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_threat_actor_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_tld_risk_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. 
By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_whitelist_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_whois_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. 
By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"CommonSecurityLog","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_AMA","Infoblox","[Recommended] Infoblox SOC Insight Data Connector via AMA","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the new Azure Monitor Agent. Learn more about ingesting using the new Azure Monitor Agent [here](https://learn.microsoft.com/azure/sentinel/connect-cef-ama). **Microsoft recommends using this Data Connector.**","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxSOCInsights/InfobloxSOCInsightsDataConnector_AMA.json","true" -"InfobloxInsight_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_API","Infoblox","Infoblox SOC Insight Data Connector via REST API","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxSOCInsights/InfobloxSOCInsightsDataConnector_API.json","true" -"CommonSecurityLog","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_Legacy","Infoblox","[Deprecated] Infoblox SOC Insight Data Connector via Legacy Agent","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the legacy Log Analytics agent.

**Microsoft recommends installation of Infoblox SOC Insight Data Connector via AMA Connector.** The legacy connector uses the Log Analytics agent which is about to be deprecated by **Aug 31, 2024,** and should only be installed where AMA is not supported.

Using MMA and AMA on the same machine can cause log duplication and extra ingestion cost. [More details](https://learn.microsoft.com/en-us/azure/sentinel/ama-migrate).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxSOCInsights/InfobloxSOCInsightsDataConnector_Legacy.json","true" -"CommonSecurityLog","Infoblox Cloud Data Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector","infoblox","infoblox-cdc-solution","2021-10-20","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxCloudDataConnector","Infoblox","[Deprecated] Infoblox Cloud Data Connector via Legacy Agent","The Infoblox Cloud Data Connector allows you to easily connect your Infoblox BloxOne data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector/Data%20Connectors/InfobloxCloudDataConnector.json","true" -"CommonSecurityLog","Infoblox Cloud Data Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector","infoblox","infoblox-cdc-solution","2021-10-20","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxCloudDataConnectorAma","Infoblox","[Deprecated] Infoblox Cloud Data Connector via AMA","The Infoblox Cloud Data Connector allows you to easily connect your Infoblox BloxOne data with Microsoft Sentinel. 
By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector/Data%20Connectors/template_InfobloxCloudDataConnectorAMA.json","true" -"Syslog","Infoblox NIOS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20NIOS","azuresentinel","azure-sentinel-solution-infobloxnios","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","InfobloxNIOS","Infoblox","[Deprecated] Infoblox NIOS","The [Infoblox Network Identity Operating System (NIOS)](https://www.infoblox.com/glossary/network-identity-operating-system-nios/) connector allows you to easily connect your Infoblox NIOS logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20NIOS/Data%20Connectors/Connector_Syslog_Infoblox.json","true" -"CommonSecurityLog","Infoblox SOC Insights","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights","infoblox","infoblox-soc-insight-solution","2024-03-06","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_AMA","Infoblox","[Deprecated] Infoblox SOC Insight Data Connector via AMA","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the new Azure Monitor Agent. Learn more about ingesting using the new Azure Monitor Agent [here](https://learn.microsoft.com/azure/sentinel/connect-cef-ama). **Microsoft recommends using this Data Connector.**","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Data%20Connectors/InfobloxSOCInsightsDataConnector_AMA.json","true" -"InfobloxInsight_CL","Infoblox SOC Insights","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights","infoblox","infoblox-soc-insight-solution","2024-03-06","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_API","Infoblox","Infoblox SOC Insight Data Connector via REST API","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Data%20Connectors/InfobloxSOCInsightsDataConnector_API.json","true" -"CommonSecurityLog","Infoblox SOC Insights","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights","infoblox","infoblox-soc-insight-solution","2024-03-06","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_Legacy","Infoblox","[Deprecated] Infoblox SOC Insight Data Connector via Legacy Agent","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the legacy Log Analytics agent.

**Microsoft recommends installation of Infoblox SOC Insight Data Connector via AMA Connector.** The legacy connector uses the Log Analytics agent which is about to be deprecated by **Aug 31, 2024,** and should only be installed where AMA is not supported.

Using MMA and AMA on the same machine can cause log duplication and extra ingestion cost. [More details](https://learn.microsoft.com/en-us/azure/sentinel/ama-migrate).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Data%20Connectors/InfobloxSOCInsightsDataConnector_Legacy.json","true" -"","InsightVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/InsightVM","","","","","","","","","","","","","","","","false" -"atlassian_beacon_alerts_CL","Integration for Atlassian Beacon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Integration%20for%20Atlassian%20Beacon","defendlimited1682894612656","microsoft-sentinel-solution-atlassian-beacon","2023-09-22","","","DEFEND Ltd.","Partner","https://www.defend.co.nz/","","domains","AtlassianBeaconAlerts","DEFEND Ltd.","Atlassian Beacon Alerts","Atlassian Beacon is a cloud product that is built for Intelligent threat detection across the Atlassian platforms (Jira, Confluence, and Atlassian Admin). This can help users detect, investigate and respond to risky user activity for the Atlassian suite of products. The solution is a custom data connector from DEFEND Ltd. 
that is used to visualize the alerts ingested from Atlassian Beacon to Microsoft Sentinel via a Logic App.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Integration%20for%20Atlassian%20Beacon/Data%20Connectors/AtlassianBeacon_DataConnector.json","true" -"","Intel471","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Intel471","intel471inc1641226539011","microsoft-sentinel-solution-intel471","2023-06-21","","","Intel 471","Partner","https://intel471.com/company/contact","","domains","","","","","","false" -"","IoTOTThreatMonitoringwithDefenderforIoT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IoTOTThreatMonitoringwithDefenderforIoT","azuresentinel","azure-sentinel-solution-unifiedmicrosoftsocforot","2021-10-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"","IronNet IronDefense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IronNet%20IronDefense","ironnetcybersecurity1585849518753","irondefense-for-sentinel","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"Island_Admin_CL","Island","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island","islandtechnologyinc1679434413850","island-sentinel-solution","2023-05-02","2023-07-20","","Island","Partner","https://www.island.io","","domains","Island_Admin_Polling","Island","Island Enterprise Browser Admin Audit (Polling CCP)","The [Island](https://www.island.io) Admin connector provides the capability to ingest Island Admin Audit logs into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island/Data%20Connectors/IslandAdminAPIConnector.json","true" 
-"Island_User_CL","Island","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island","islandtechnologyinc1679434413850","island-sentinel-solution","2023-05-02","2023-07-20","","Island","Partner","https://www.island.io","","domains","Island_User_Polling","Island","Island Enterprise Browser User Activity (Polling CCP)","The [Island](https://www.island.io) connector provides the capability to ingest Island User Activity logs into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island/Data%20Connectors/IslandUserAPIConnector.json","true" -"Syslog","Ivanti Unified Endpoint Management","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ivanti%20Unified%20Endpoint%20Management","azuresentinel","azure-sentinel-solution-ivantiuem","2022-07-05","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","IvantiUEM","Ivanti","[Deprecated] Ivanti Unified Endpoint Management","The [Ivanti Unified Endpoint Management](https://www.ivanti.com/products/unified-endpoint-manager) data connector provides the capability to ingest [Ivanti UEM Alerts](https://help.ivanti.com/ld/help/en_US/LDMS/11.0/Windows/alert-c-monitoring-overview.htm) into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ivanti%20Unified%20Endpoint%20Management/Data%20Connectors/Ivanti_UEM_Syslog.json","true" -"JBossLogs_CL","JBoss","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JBoss","azuresentinel","azure-sentinel-solution-jboss","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JBossEAP","Red Hat","[Deprecated] JBoss Enterprise Application Platform","The JBoss Enterprise Application Platform data connector provides the capability to ingest [JBoss](https://www.redhat.com/en/technologies/jboss-middleware/application-platform) events into Microsoft Sentinel. 
Refer to [Red Hat documentation](https://access.redhat.com/documentation/en-us/red_hat_jboss_enterprise_application_platform/7.0/html/configuration_guide/logging_with_jboss_eap) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JBoss/Data%20Connectors/Connector_JBoss.json","true" -"jamfprotectalerts_CL","Jamf Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect","jamfsoftwareaustraliaptyltd1620360395539","jamf_protect","2022-10-10","2025-09-02","","Jamf Software, LLC","Partner","https://www.jamf.com/support/","","domains","JamfProtectPush","Jamf","Jamf Protect Push Connector","The [Jamf Protect](https://www.jamf.com/products/jamf-protect/) connector provides the capability to read raw event data from Jamf Protect in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect/Data%20Connectors/JamfProtect_ccp/connectorDefinition.json","true" -"jamfprotecttelemetryv2_CL","Jamf Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect","jamfsoftwareaustraliaptyltd1620360395539","jamf_protect","2022-10-10","2025-09-02","","Jamf Software, LLC","Partner","https://www.jamf.com/support/","","domains","JamfProtectPush","Jamf","Jamf Protect Push Connector","The [Jamf Protect](https://www.jamf.com/products/jamf-protect/) connector provides the capability to read raw event data from Jamf Protect in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect/Data%20Connectors/JamfProtect_ccp/connectorDefinition.json","true" -"jamfprotectunifiedlogs_CL","Jamf Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect","jamfsoftwareaustraliaptyltd1620360395539","jamf_protect","2022-10-10","2025-09-02","","Jamf Software, LLC","Partner","https://www.jamf.com/support/","","domains","JamfProtectPush","Jamf","Jamf Protect Push Connector","The [Jamf 
Protect](https://www.jamf.com/products/jamf-protect/) connector provides the capability to read raw event data from Jamf Protect in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect/Data%20Connectors/JamfProtect_ccp/connectorDefinition.json","true" -"","Joshua-Cyberiskvision","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Joshua-Cyberiskvision","almavivaspa1636563933762","joshua-cyberiskvision","2022-01-10","2022-01-10","","Joshua Cyberiskvision","Partner","https://www.cyberiskvision.com/","","domains","","","","","","false" -"Syslog","Juniper SRX","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Juniper%20SRX","azuresentinel","azure-sentinel-solution-junipersrx","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JuniperSRX","Juniper","[Deprecated] Juniper SRX","The [Juniper SRX](https://www.juniper.net/us/en/products-services/security/srx-series/) connector allows you to easily connect your Juniper SRX logs with Microsoft Sentinel. 
This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Juniper%20SRX/Data%20Connectors/Connector_Syslog_JuniperSRX.json","true" -"JuniperIDP_CL","JuniperIDP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JuniperIDP","azuresentinel","azure-sentinel-solution-juniperidp","2021-03-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JuniperIDP","Juniper","[Deprecated] Juniper IDP","The [Juniper](https://www.juniper.net/) IDP data connector provides the capability to ingest [Juniper IDP](https://www.juniper.net/documentation/us/en/software/junos/idp-policy/topics/topic-map/security-idp-overview.html) events into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JuniperIDP/Data%20Connectors/Connector_LogAnalytics_agent_JuniperIDP.json","true" -"","KQL Training","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/KQL%20Training","microsoftsentinelcommunity","azure-sentinel-solution-kqltraining","2022-11-30","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","false" -"KeeperSecurityEventNewLogs_CL","Keeper Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Keeper%20Security","keepersecurity","keeper-security-integration","2025-06-03","2025-06-03","","Keeper Security","Partner","https://www.keepersecurity.com","","domains","KeeperSecurityPush2","Keeper Security","Keeper Security Push Connector","The [Keeper Security](https://keepersecurity.com) connector provides the capability to read raw event data from Keeper Security in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Keeper%20Security/Data%20Connectors/KeeperSecurity_ccp/KepperSecurity_Definition.json","true" 
-"LastPassNativePoller_CL","LastPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/LastPass","thecollectiveconsultingbv1584980370320","lastpass-enterprise-monitoring-solution","2021-10-20","2022-01-12","","The Collective Consulting","Partner","https://thecollective.eu","","domains","LastPass_Polling","The Collective Consulting BV","LastPass Enterprise - Reporting (Polling CCP)","The [LastPass Enterprise](https://www.lastpass.com/products/enterprise-password-management-and-sso) connector provides the capability to LastPass reporting (audit) logs into Microsoft Sentinel. The connector provides visibility into logins and activity within LastPass (such as reading and removing passwords).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/LastPass/Data%20Connectors/LastPassAPIConnector.json","true" -"","Legacy IOC based Threat Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Legacy%20IOC%20based%20Threat%20Protection","azuresentinel","azure-sentinel-solution-ioclegacy","2022-12-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"Lookout_CL","Lookout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout","lookoutinc","lookout_mtd_sentinel","2021-10-18","","","Lookout","Partner","https://www.lookout.com/support","","domains","LookoutAPI","Lookout","[DEPRECATED] Lookout","The [Lookout](https://lookout.com) data connector provides the capability to ingest [Lookout](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#commoneventfields) events into Microsoft Sentinel through the Mobile Risk API. Refer to [API documentation](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide) for more information. The [Lookout](https://lookout.com) data connector provides ability to get events which helps to examine potential security risks and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout/Data%20Connectors/Lookout_API_FunctionApp.json","true" -"LookoutMtdV2_CL","Lookout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout","lookoutinc","lookout_mtd_sentinel","2021-10-18","","","Lookout","Partner","https://www.lookout.com/support","","domains","LookoutStreaming_Definition","Microsoft","Lookout Mobile Threat Detection Connector (via Codeless Connector Framework) (Preview)","The [Lookout Mobile Threat Detection](https://lookout.com) data connector provides the capability to ingest events related to mobile security risks into Microsoft Sentinel through the Mobile Risk API. Refer to [API documentation](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide) for more information. This connector helps you examine potential security risks detected in mobile devices.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout/Data%20Connectors/LookoutStreamingConnector_ccp/LookoutStreaming_DataConnectorDefinition.json","true" -"LookoutCloudSecurity_CL","Lookout Cloud Security Platform for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout%20Cloud%20Security%20Platform%20for%20Microsoft%20Sentinel","lookoutinc","lookout_cloudsecurity_sentinel","2023-02-17","","","Lookout","Partner","https://www.lookout.com/support","","domains","LookoutCloudSecurityDataConnector","Lookout","Lookout Cloud Security for Microsoft Sentinel","This connector uses a Agari REST API connection to push data into Microsoft Sentinel Log Analytics.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout%20Cloud%20Security%20Platform%20for%20Microsoft%20Sentinel/Data%20Connectors/LookoutCSConnector/LookoutCloudSecurityConnector_API_FunctionApp.json","true" -"ThreatIntelIndicators","Lumen Defender Threat 
Feed","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lumen%20Defender%20Threat%20Feed","centurylink","azure-sentinel-solution-lumen-defender-threat-feed","2025-09-12","2025-09-12","","Lumen Technologies, Inc.","Partner","https://www.lumen.com/en-us/contact-us/support.html","","domains","LumenThreatFeedConnector","Lumen Technologies, Inc.","Lumen Defender Threat Feed Data Connector","The [Lumen Defender Threat Feed](https://bll-analytics.mss.lumen.com/analytics) connector provides the capability to ingest STIX-formatted threat intelligence indicators from Lumen's Black Lotus Labs research team into Microsoft Sentinel. The connector automatically downloads and uploads daily threat intelligence indicators including IPv4 addresses and domains to the ThreatIntelIndicators table via the STIX Objects Upload API.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lumen%20Defender%20Threat%20Feed/Data%20Connectors/LumenThreatFeed/LumenThreatFeedConnector_ConnectorUI.json","true" -"ThreatIntelligenceIndicator","MISP2Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MISP2Sentinel","microsoftsentinelcommunity","azure-sentinel-solution-misp2sentinel","2023-07-29","2023-07-29","","Community","Community","https://github.com/cudeso/misp2sentinel","","domains,verticals","MISP2SentinelConnector","MISP project & cudeso.be","MISP2Sentinel","This solution installs the MISP2Sentinel connector that allows you to automatically push threat indicators from MISP to Microsoft Sentinel via the Upload Indicators REST API. 
After installing the solution, configure and enable this data connector by following guidance in Manage solution view.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MISP2Sentinel/Data%20Connectors/MISP2SentinelConnector_UploadIndicatorsAPI.json","true" -"MailGuard365_Threats_CL","MailGuard 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailGuard%20365","mailguardptylimited","microsoft-sentinel-solution-mailguard365","2023-05-09","2023-06-08","","MailGuard 365","Partner","https://www.mailguard365.com/support/","","domains","MailGuard365","MailGuard365","MailGuard 365","MailGuard 365 Enhanced Email Security for Microsoft 365. Exclusive to the Microsoft marketplace, MailGuard 365 is integrated with Microsoft 365 security (incl. Defender) for enhanced protection against advanced email threats like phishing, ransomware and sophisticated BEC attacks.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailGuard%20365/Data%20Connectors/MailGuard365.json","true" -"MailRiskEventEmails_CL","MailRisk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailRisk","securepracticeas1650887373770","microsoft-sentinel-solution-mailrisk","2023-03-16","2025-10-27","","Secure Practice","Partner","https://securepractice.co/support","","domains","SecurePracticeMailRiskConnector","Secure Practice","MailRisk by Secure Practice","The MailRisk by Secure Practice connector allows you to ingest email threat intelligence data from the MailRisk API into Microsoft Sentinel. 
This connector provides visibility into reported emails, risk assessments, and security events related to email threats.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailRisk/Data%20Connectors/MailRisk_CCP/MailRisk_ConnectorDefinition.json","true" -"","Malware Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Malware%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-malwareprotection","2023-09-25","2023-09-25","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"","MarkLogicAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MarkLogicAudit","azuresentinel","azure-sentinel-solution-marklogicaudit","2022-08-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"","MaturityModelForEventLogManagementM2131","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MaturityModelForEventLogManagementM2131","azuresentinel","azure-sentinel-solution-maturitymodelforeventlogma","2021-12-05","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"Syslog","McAfee Network Security Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20Network%20Security%20Platform","azuresentinel","azure-sentinel-solution-mcafeensp","2021-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","McAfeeNSP","McAfee","[Deprecated] McAfee Network Security Platform","The [McAfee® Network Security Platform](https://www.mcafee.com/enterprise/en-us/products/network-security-platform.html) data connector provides the capability to ingest [McAfee® Network Security Platform events](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-integration-guide-unmanaged/page/GUID-8C706BE9-6AC9-4641-8A53-8910B51207D8.html) into Microsoft Sentinel. 
Refer to [McAfee® Network Security Platform](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-integration-guide-unmanaged/page/GUID-F7D281EC-1CC9-4962-A7A3-5A9D9584670E.html) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20Network%20Security%20Platform/Data%20Connectors/McAfeeNSP.json","true" -"Syslog","McAfee ePolicy Orchestrator","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20ePolicy%20Orchestrator","azuresentinel","azure-sentinel-solution-mcafeeepo","2021-03-25","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","McAfeeePO","McAfee","[Deprecated] McAfee ePolicy Orchestrator (ePO)","The McAfee ePolicy Orchestrator data connector provides the capability to ingest [McAfee ePO](https://www.mcafee.com/enterprise/en-us/products/epolicy-orchestrator.html) events into Microsoft Sentinel through the syslog. Refer to [documentation](https://docs.mcafee.com/bundle/epolicy-orchestrator-landing/page/GUID-0C40020F-5B7F-4549-B9CC-0E017BC8797F.html) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20ePolicy%20Orchestrator/Data%20Connectors/Connector_McAfee_ePO.json","true" -"OfficeActivity","Microsoft 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365","azuresentinel","azure-sentinel-solution-office365","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","Office365","Microsoft","Microsoft 365 (formerly, Office 365)","The Microsoft 365 (formerly, Office 365) activity log connector provides insight into ongoing user activities. You will get details of operations such as file downloads, access requests sent, changes to group events, set-mailbox and details of the user who performed the actions. 
By connecting Microsoft 365 logs into Microsoft Sentinel you can use this data to view dashboards, create custom alerts, and improve your investigation process. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219943&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365/Data%20Connectors/Microsoft365.JSON","true" -"exchange","Microsoft 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365","azuresentinel","azure-sentinel-solution-office365","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","Office365","Microsoft","Microsoft 365 (formerly, Office 365)","The Microsoft 365 (formerly, Office 365) activity log connector provides insight into ongoing user activities. You will get details of operations such as file downloads, access requests sent, changes to group events, set-mailbox and details of the user who performed the actions. By connecting Microsoft 365 logs into Microsoft Sentinel you can use this data to view dashboards, create custom alerts, and improve your investigation process. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219943&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365/Data%20Connectors/Microsoft365.JSON","true" -"sharePoint","Microsoft 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365","azuresentinel","azure-sentinel-solution-office365","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","Office365","Microsoft","Microsoft 365 (formerly, Office 365)","The Microsoft 365 (formerly, Office 365) activity log connector provides insight into ongoing user activities. 
You will get details of operations such as file downloads, access requests sent, changes to group events, set-mailbox and details of the user who performed the actions. By connecting Microsoft 365 logs into Microsoft Sentinel you can use this data to view dashboards, create custom alerts, and improve your investigation process. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219943&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365/Data%20Connectors/Microsoft365.JSON","true" -"teams","Microsoft 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365","azuresentinel","azure-sentinel-solution-office365","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","Office365","Microsoft","Microsoft 365 (formerly, Office 365)","The Microsoft 365 (formerly, Office 365) activity log connector provides insight into ongoing user activities. You will get details of operations such as file downloads, access requests sent, changes to group events, set-mailbox and details of the user who performed the actions. By connecting Microsoft 365 logs into Microsoft Sentinel you can use this data to view dashboards, create custom alerts, and improve your investigation process. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219943&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365/Data%20Connectors/Microsoft365.JSON","true" -"FinanceOperationsActivity_CL","Microsoft Business Applications","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Business%20Applications","sentinel4dynamics365","powerplatform","2023-04-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Dynamics365Finance","Microsoft","Dynamics 365 Finance and Operations","Dynamics 365 for Finance and Operations is a comprehensive Enterprise Resource Planning (ERP) solution that combines financial and operational capabilities to help businesses manage their day-to-day operations. It offers a range of features that enable businesses to streamline workflows, automate tasks, and gain insights into operational performance.

The Dynamics 365 Finance and Operations data connector ingests Dynamics 365 Finance and Operations admin activities and audit logs as well as user business process and application activities logs into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Business%20Applications/Data%20Connectors/DynamicsFinOpsPollerConnector/DynamicsFinOps_DataConnectorDefinition.json","true" -"LLMActivity","Microsoft Copilot","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Copilot","azuresentinel","azure-sentinel-solution-microsoftcopilot","2025-10-01","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","MicrosoftCopilot","Microsoft","Microsoft Copilot","The Microsoft Copilot logs connector in Microsoft Sentinel enables the seamless ingestion of Copilot-generated activity logs into Microsoft Sentinel for advanced threat detection, investigation, and response. It collects telemetry from Microsoft Copilot services - such as usage data, prompts and system responses - and ingests into Microsoft Sentinel, allowing security teams to monitor for misuse, detect anomalies, and maintain compliance with organizational policies.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Copilot/Data%20Connectors/MicrosoftCopilot_ConnectorDefinition.json","true" -"SecurityAlert","Microsoft Defender For Identity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20For%20Identity","azuresentinel","azure-sentinel-solution-mdefenderforidentity","2022-04-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureAdvancedThreatProtection","Microsoft","Microsoft Defender for Identity","Connect Microsoft Defender for Identity to gain visibility into the events and user analytics. 
Microsoft Defender for Identity identifies, detects, and helps you investigate advanced threats, compromised identities, and malicious insider actions directed at your organization. Microsoft Defender for Identity enables SecOp analysts and security professionals struggling to detect advanced attacks in hybrid environments to:

- Monitor users, entity behavior, and activities with learning-based analytics​
- Protect user identities and credentials stored in Active Directory
- Identify and investigate suspicious user activities and advanced attacks throughout the kill chain
- Provide clear incident information on a simple timeline for fast triage

[Try now >](https://aka.ms/AtpTryNow)

[Deploy now >](https://aka.ms/AzureATP_Deploy)

For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2220069&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20For%20Identity/Data%20Connectors/MicrosoftDefenderforIdentity.JSON","true" -"","Microsoft Defender Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20Threat%20Intelligence","azuresentinel","azure-sentinel-solution-microsoftdefenderthreatint","2023-03-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"AlertEvidence","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"CloudAppEvents","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"DeviceEvents","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"DeviceFileCertificateInfo","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"DeviceFileEvents","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"DeviceImageLoadEvents","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"DeviceInfo","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"DeviceLogonEvents","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"DeviceNetworkEvents","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"DeviceNetworkInfo","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"DeviceProcessEvents","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"DeviceRegistryEvents","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"EmailAttachmentInfo","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"EmailEvents","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"EmailPostDeliveryEvents","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"EmailUrlInfo","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"IdentityDirectoryEvents","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"IdentityLogonEvents","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"IdentityQueryEvents","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"SecurityAlert","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"SecurityIncident","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"UrlClickEvents","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"SecurityAlert","Microsoft Defender for Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud","azuresentinel","azure-sentinel-solution-microsoftdefenderforcloud","2022-05-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureSecurityCenter","Microsoft","Subscription-based Microsoft Defender for Cloud (Legacy)","Microsoft Defender for Cloud is a security management tool that allows you to detect and quickly respond to threats across Azure, hybrid, and multi-cloud workloads. This connector allows you to stream your security alerts from Microsoft Defender for Cloud into Microsoft Sentinel, so you can view Defender data in workbooks, query it to produce alerts, and investigate and respond to incidents.

[For more information>](https://aka.ms/ASC-Connector)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud/Data%20Connectors/AzureSecurityCenter.JSON","true" -"SecurityAlert","Microsoft Defender for Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud","azuresentinel","azure-sentinel-solution-microsoftdefenderforcloud","2022-05-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftDefenderForCloudTenantBased","Microsoft","Tenant-based Microsoft Defender for Cloud","Microsoft Defender for Cloud is a security management tool that allows you to detect and quickly respond to threats across Azure, hybrid, and multi-cloud workloads. This connector allows you to stream your MDC security alerts from Microsoft 365 Defender into Microsoft Sentinel, so you can leverage the advantages of XDR correlations connecting the dots across your cloud resources, devices and identities and view the data in workbooks, queries and investigate and respond to incidents. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269832&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud/Data%20Connectors/MicrosoftDefenderForCloudTenantBased.json","true" -"McasShadowItReporting","Microsoft Defender for Cloud Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps","azuresentinel","azure-sentinel-solution-microsoftdefendercloudapps","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftCloudAppSecurity","Microsoft","Microsoft Defender for Cloud Apps","By connecting with [Microsoft Defender for Cloud Apps](https://aka.ms/asi-mcas-connector-description) you will gain visibility into your cloud apps, get sophisticated analytics to identify and combat cyberthreats, and control how your data travels.

- Identify shadow IT cloud apps on your network.
- Control and limit access based on conditions and session context.
- Use built-in or custom policies for data sharing and data loss prevention.
- Identify high-risk use and get alerts for unusual user activities with Microsoft behavioral analytics and anomaly detection capabilities, including ransomware activity, impossible travel, suspicious email forwarding rules, and mass download of files.
- Mass download of files

[Deploy now >](https://aka.ms/asi-mcas-connector-deploynow)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps/Data%20Connectors/MicrosoftCloudAppSecurity.JSON","true" -"SecurityAlert","Microsoft Defender for Cloud Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps","azuresentinel","azure-sentinel-solution-microsoftdefendercloudapps","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftCloudAppSecurity","Microsoft","Microsoft Defender for Cloud Apps","By connecting with [Microsoft Defender for Cloud Apps](https://aka.ms/asi-mcas-connector-description) you will gain visibility into your cloud apps, get sophisticated analytics to identify and combat cyberthreats, and control how your data travels.

- Identify shadow IT cloud apps on your network.
- Control and limit access based on conditions and session context.
- Use built-in or custom policies for data sharing and data loss prevention.
- Identify high-risk use and get alerts for unusual user activities with Microsoft behavioral analytics and anomaly detection capabilities, including ransomware activity, impossible travel, suspicious email forwarding rules, and mass download of files.
- Mass download of files

[Deploy now >](https://aka.ms/asi-mcas-connector-deploynow)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps/Data%20Connectors/MicrosoftCloudAppSecurity.JSON","true" -"discoveryLogs","Microsoft Defender for Cloud Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps","azuresentinel","azure-sentinel-solution-microsoftdefendercloudapps","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftCloudAppSecurity","Microsoft","Microsoft Defender for Cloud Apps","By connecting with [Microsoft Defender for Cloud Apps](https://aka.ms/asi-mcas-connector-description) you will gain visibility into your cloud apps, get sophisticated analytics to identify and combat cyberthreats, and control how your data travels.

- Identify shadow IT cloud apps on your network.
- Control and limit access based on conditions and session context.
- Use built-in or custom policies for data sharing and data loss prevention.
- Identify high-risk use and get alerts for unusual user activities with Microsoft behavioral analytics and anomaly detection capabilities, including ransomware activity, impossible travel, suspicious email forwarding rules, and mass download of files.
- Mass download of files

[Deploy now >](https://aka.ms/asi-mcas-connector-deploynow)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps/Data%20Connectors/MicrosoftCloudAppSecurity.JSON","true" -"SecurityAlert","Microsoft Defender for Office 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Office%20365","azuresentinel","azure-sentinel-solution-microsoftdefenderforo365","2022-05-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","OfficeATP","Microsoft","Microsoft Defender for Office 365 (Preview)","Microsoft Defender for Office 365 safeguards your organization against malicious threats posed by email messages, links (URLs) and collaboration tools. By ingesting Microsoft Defender for Office 365 alerts into Microsoft Sentinel, you can incorporate information about email- and URL-based threats into your broader risk analysis and build response scenarios accordingly.

The following types of alerts will be imported:

- A potentially malicious URL click was detected
- Email messages containing malware removed after delivery
- Email messages containing phish URLs removed after delivery
- Email reported by user as malware or phish
- Suspicious email sending patterns detected
- User restricted from sending email

These alerts can be seen by Office customers in the **Office Security and Compliance Center**.

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219942&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Office%20365/Data%20Connectors/template_OfficeATP.json","true" -"AADManagedIdentitySignInLogs","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"AADNonInteractiveUserSignInLogs","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. 
You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"AADProvisioningLogs","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"AADRiskyServicePrincipals","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"AADRiskyUsers","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. 
You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"AADServicePrincipalRiskEvents","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"AADServicePrincipalSignInLogs","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"AADUserRiskEvents","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. 
You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"ADFSSignInLogs","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"AuditLogs","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"ManagedIdentitySignInLogs","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. 
You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"NetworkAccessTraffic","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"NetworkAccessTrafficLogs","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"NonInteractiveUserSignInLogs","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. 
You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"ProvisioningLogs","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"RiskyServicePrincipals","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"RiskyUsers","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. 
You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"ServicePrincipalRiskEvents","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"ServicePrincipalSignInLogs","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"SignInLogs","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. 
You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"SigninLogs","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"UserRiskEvents","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"SecurityAlert","Microsoft Entra ID Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Protection","azuresentinel","azure-sentinel-solution-azureactivedirectoryip","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectoryIdentityProtection","Microsoft","Microsoft Entra ID Protection","Microsoft Entra ID Protection provides a consolidated view at risk users, risk events and vulnerabilities, with the ability to remediate risk immediately, and set policies to auto-remediate future events. The service is built on Microsoft’s experience protecting consumer identities and gains tremendous accuracy from the signal from over 13 billion logins a day. Integrate Microsoft Entra ID Protection alerts with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. For more information, see the [Microsoft Sentinel documentation ](https://go.microsoft.com/fwlink/p/?linkid=2220065&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).

[Get Microsoft Entra ID Premium P1/P2 ](https://aka.ms/asi-ipcconnectorgetlink)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Protection/Data%20Connectors/template_AzureActiveDirectoryIdentityProtection.JSON","true" -"Event","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" -"ExchangeHttpProxy_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. 
You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" -"MessageTrackingLog_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. 
This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" -"SecurityEvent","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" -"W3CIISLog","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. 
You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" -"ESIExchangeConfig_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeOnPremisesCollector","Microsoft","Exchange Security Insights On-Premises Collector","Connector used to push Exchange On-Premises Security configuration for Microsoft Sentinel Analysis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeOnPremisesCollector.json","true" -"Event","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt1ExchangeAdminAuditLogsByEventLogs","Microsoft","Microsoft Exchange Admin Audit Logs by Event Logs","[Option 1] - Using Azure Monitor Agent - You can stream all Exchange Audit events from the Windows machines connected to your Microsoft Sentinel 
workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt1ExchangeAdminAuditLogsByEventLogs.json","true" -"Event","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt2ExchangeServersEventLogs","Microsoft","Microsoft Exchange Logs and Events","[Option 2] - Using Azure Monitor Agent - You can stream all Exchange Security & Application Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. 
This connection enables you to create custom alerts, and improve investigation.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt2ExchangeServersEventLogs.json","true" -"SecurityEvent","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt34DomainControllersSecurityEventLogs","Microsoft"," Microsoft Active-Directory Domain Controllers Security Event Logs","[Option 3 & 4] - Using Azure Monitor Agent -You can stream a part or all Domain Controllers Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to create custom alerts, and improve investigation.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt34DomainControllersSecurityEventLogs.json","true" -"W3CIISLog","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt5ExchangeIISLogs","Microsoft","IIS Logs of Microsoft Exchange Servers","[Option 5] - Using Azure Monitor Agent - You can stream all IIS Logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. 
This connection enables you to create custom alerts, and improve investigation.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt5ExchangeIISLogs.json","true" -"MessageTrackingLog_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt6ExchangeMessageTrackingLogs","Microsoft","Microsoft Exchange Message Tracking Logs","[Option 6] - Using Azure Monitor Agent - You can stream all Exchange Message Tracking from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. Those logs can be used to track the flow of messages in your Exchange environment. This data connector is based on the option 6 of the [Microsoft Exchange Security wiki](https://aka.ms/ESI_DataConnectorOptions).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt6ExchangeMessageTrackingLogs.json","true" -"ExchangeHttpProxy_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt7ExchangeHTTPProxyLogs","Microsoft","Microsoft Exchange HTTP Proxy Logs","[Option 7] - Using Azure Monitor Agent - You can stream HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. 
This connection enables you create custom alerts, and improve investigation. [Learn more](https://aka.ms/ESI_DataConnectorOptions)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt7ExchangeHTTPProxyLogs.json","true" -"ESIExchangeOnlineConfig_CL","Microsoft Exchange Security - Exchange Online","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20Online","microsoftsentinelcommunity","azure-sentinel-solution-esionline","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeOnlineCollector","Microsoft","Exchange Security Insights Online Collector","Connector used to push Exchange Online Security configuration for Microsoft Sentinel Analysis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20Online/Data%20Connectors/ESI-ExchangeOnlineCollector.json","true" -"PowerBIActivity","Microsoft PowerBI","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20PowerBI","azuresentinel","azure-sentinel-solution-microsoftpowerbi","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OfficePowerBI","Microsoft","Microsoft PowerBI","Microsoft PowerBI is a collection of software services, apps, and connectors that work together to turn your unrelated sources of data into coherent, visually immersive, and interactive insights. Your data may be an Excel spreadsheet, a collection of cloud-based and on-premises hybrid data warehouses, or a data store of some other type. This connector lets you stream PowerBI audit logs into Microsoft Sentinel, allowing you to track user activities in your PowerBI environment. 
You can filter the audit data by date range, user, dashboard, report, dataset, and activity type.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20PowerBI/Data%20Connectors/template_OfficePowerBI.json","true" -"ProjectActivity","Microsoft Project","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Project","azuresentinel","azure-sentinel-solution-microsoftproject","2022-05-23","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","Office365Project","Microsoft","Microsoft Project","Microsoft Project (MSP) is a project management software solution. Depending on your plan, Microsoft Project lets you plan projects, assign tasks, manage resources, create reports and more. This connector allows you to stream your Azure Project audit logs into Microsoft Sentinel in order to track your project activities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Project/Data%20Connectors/template_Office365Project.JSON","true" -"PurviewDataSensitivityLogs","Microsoft Purview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview","azuresentinel","azure-sentinel-solution-azurepurview","2021-11-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftAzurePurview","Microsoft","Microsoft Purview","Connect to Microsoft Purview to enable data sensitivity enrichment of Microsoft Sentinel. Data classification and sensitivity label logs from Microsoft Purview scans can be ingested and visualized through workbooks, analytical rules, and more. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2224125&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview/Data%20Connectors/MicrosoftPurview.json","true" -"MicrosoftPurviewInformationProtection","Microsoft Purview Information Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview%20Information%20Protection","azuresentinel","azure-sentinel-solution-mip","2023-01-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftPurviewInformationProtection","Microsoft","Microsoft Purview Information Protection","Microsoft Purview Information Protection helps you discover, classify, protect, and govern sensitive information wherever it lives or travels. Using these capabilities enable you to know your data, identify items that are sensitive and gain visibility into how they are being used to better protect your data. Sensitivity labels are the foundational capability that provide protection actions, applying encryption, access restrictions and visual markings.
Integrate Microsoft Purview Information Protection logs with Microsoft Sentinel to view dashboards, create custom alerts and improve investigation. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223811&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview%20Information%20Protection/Data%20Connectors/MicrosoftPurviewInformationProtection.json","true" -"Syslog","Microsoft Sysmon For Linux","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Sysmon%20For%20Linux","azuresentinel","azure-sentinel-solution-sysmonforlinux","2021-10-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftSysmonForLinux","Microsoft","[Deprecated] Microsoft Sysmon For Linux","[Sysmon for Linux](https://github.com/Sysinternals/SysmonForLinux) provides detailed information about process creations, network connections and other system events.
[Sysmon for linux link:]. The Sysmon for Linux connector uses [Syslog](https://aka.ms/sysLogInfo) as its data ingestion method. This solution depends on ASIM to work as expected. [Deploy ASIM](https://aka.ms/DeployASIM) to get the full value from the solution.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Sysmon%20For%20Linux/Data%20Connectors/SysmonForLinux.json","true" -"vimProcessCreateLinuxSysmon","Microsoft Sysmon For Linux","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Sysmon%20For%20Linux","azuresentinel","azure-sentinel-solution-sysmonforlinux","2021-10-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftSysmonForLinux","Microsoft","[Deprecated] Microsoft Sysmon For Linux","[Sysmon for Linux](https://github.com/Sysinternals/SysmonForLinux) provides detailed information about process creations, network connections and other system events.
[Sysmon for linux link:]. The Sysmon for Linux connector uses [Syslog](https://aka.ms/sysLogInfo) as its data ingestion method. This solution depends on ASIM to work as expected. [Deploy ASIM](https://aka.ms/DeployASIM) to get the full value from the solution.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Sysmon%20For%20Linux/Data%20Connectors/SysmonForLinux.json","true" -"","Microsoft Windows SQL Server Database Audit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Windows%20SQL%20Server%20Database%20Audit","microsoftsentinelcommunity","azure-sentinel-solution-sqlserverdatabaseaudit","2022-11-29","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","false" -"SecurityAlert","MicrosoftDefenderForEndpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftDefenderForEndpoint","azuresentinel","azure-sentinel-solution-microsoftdefenderendpoint","2022-01-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftDefenderAdvancedThreatProtection","Microsoft","Microsoft Defender for Endpoint","Microsoft Defender for Endpoint is a security platform designed to prevent, detect, investigate, and respond to advanced threats. The platform creates alerts when suspicious security events are seen in an organization. Fetch alerts generated in Microsoft Defender for Endpoint to Microsoft Sentinel so that you can effectively analyze security events. You can create rules, build dashboards and author playbooks for immediate response. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2220128&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftDefenderForEndpoint/Data%20Connectors/template_MicrosoftDefenderAdvancedThreatProtection.JSON","true" -"SecurityAlert","MicrosoftPurviewInsiderRiskManagement","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftPurviewInsiderRiskManagement","azuresentinel","azure-sentinel-solution-insiderriskmanagement","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OfficeIRM","Microsoft","Microsoft 365 Insider Risk Management","Microsoft 365 Insider Risk Management is a compliance solution in Microsoft 365 that helps minimize internal risks by enabling you to detect, investigate, and act on malicious and inadvertent activities in your organization. Risk analysts in your organization can quickly take appropriate actions to make sure users are compliant with your organization's compliance standards.

Insider risk policies allow you to:

- define the types of risks you want to identify and detect in your organization.
- decide on what actions to take in response, including escalating cases to Microsoft Advanced eDiscovery if needed.

This solution produces alerts that can be seen by Office customers in the Insider Risk Management solution in Microsoft 365 Compliance Center.
[Learn More](https://aka.ms/OfficeIRMConnector) about Insider Risk Management.

These alerts can be imported into Microsoft Sentinel with this connector, allowing you to see, investigate, and respond to them in a broader organizational threat context. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223721&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftPurviewInsiderRiskManagement/Data%20Connectors/template_OfficeIRM.JSON","true" -"Awareness_Performance_Details_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" -"Awareness_SafeScore_Details_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" -"Awareness_User_Data_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" -"Awareness_Watchlist_Details_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" -"Audit_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastAuditAPI","Mimecast","Mimecast Audit","The data connector for [Mimecast Audit](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to audit and authentication events within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into user activity, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
Audit
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAudit/Mimecast_Audit_FunctionApp.json","true" -"Cloud_Integrated_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastCIAPI","Mimecast","Mimecast Cloud Integrated","The data connector for [Mimecast Cloud Integrated](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Cloud Integrated inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastCloudIntegrated/Mimecast_Cloud_Integrated_FunctionApp.json","true" -"Seg_Cg_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSEGAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Cloud Gateway
- Mimecast Data Leak Prevention
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastSEG/Mimecast_SEG_FunctionApp.json","true" -"Seg_Dlp_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSEGAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Cloud Gateway
- Mimecast Data Leak Prevention
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastSEG/Mimecast_SEG_FunctionApp.json","true" -"Ttp_Attachment_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastTTP/Mimecast_TTP_FunctionApp.json","true" -"Ttp_Impersonation_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastTTP/Mimecast_TTP_FunctionApp.json","true" -"Ttp_Url_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastTTP/Mimecast_TTP_FunctionApp.json","true" -"MimecastAudit_CL","MimecastAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastAudit","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastaudit","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastAuditAPI","Mimecast","Mimecast Audit & Authentication","The data connector for [Mimecast Audit & Authentication](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to audit and authentication events within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into user activity, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
Audit & Authentication
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastAudit/Data%20Connectors/MimecastAudit_API_AzureFunctionApp.json","true" -"MimecastDLP_CL","MimecastSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastseg","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSIEMAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Data Leak Prevention
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG/Data%20Connectors/MimecastSEG_API_AzureFunctionApp.json","true" -"MimecastSIEM_CL","MimecastSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastseg","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSIEMAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Data Leak Prevention
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG/Data%20Connectors/MimecastSEG_API_AzureFunctionApp.json","true" -"Event","MimecastTIRegional","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecasttiregional","2023-08-23","2023-09-11","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTIRegionalConnectorAzureFunctions","Mimecast","Mimecast Intelligence for Microsoft - Microsoft Sentinel","The data connector for Mimecast Intelligence for Microsoft provides regional threat intelligence curated from Mimecast’s email inspection technologies with pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times.
Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Threat Intelligence
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional/Data%20Connectors/MimecastTIRegional_API_AzureFunctionApp.json","true" -"ThreatIntelligenceIndicator","MimecastTIRegional","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecasttiregional","2023-08-23","2023-09-11","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTIRegionalConnectorAzureFunctions","Mimecast","Mimecast Intelligence for Microsoft - Microsoft Sentinel","The data connector for Mimecast Intelligence for Microsoft provides regional threat intelligence curated from Mimecast’s email inspection technologies with pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times.
Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Threat Intelligence
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional/Data%20Connectors/MimecastTIRegional_API_AzureFunctionApp.json","true" -"MimecastTTPAttachment_CL","MimecastTTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastttp","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP/Data%20Connectors/MimecastTTP_API_FunctionApp.json","true" -"MimecastTTPImpersonation_CL","MimecastTTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastttp","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP/Data%20Connectors/MimecastTTP_API_FunctionApp.json","true" -"MimecastTTPUrl_CL","MimecastTTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastttp","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP/Data%20Connectors/MimecastTTP_API_FunctionApp.json","true" -"","Minemeld","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Minemeld","azuresentinel","azure-sentinel-solution-minemeld","2022-10-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"MDBALogTable_CL","MongoDBAtlas","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAtlas","mongodb","azure-sentinel-solution-mongodbatlas","2025-08-22","","","MongoDB","Partner","https://www.mongodb.com/company/contact","","domains","MongoDBAtlasLogsAzureFunctions","MongoDB","MongoDB Atlas Logs","The [MongoDBAtlas](https://www.mongodb.com/products/platform/atlas-database) Logs connector gives the capability to upload MongoDB Atlas database logs into Microsoft Sentinel through the MongoDB Atlas Administration API. Refer to the [API documentation](https://www.mongodb.com/docs/api/doc/atlas-admin-api-v2/) for more information. The connector provides the ability to get a range of database log messages for the specified hosts and specified project.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAtlas/Data%20Connectors/MongoDBAtlasLogs/MongoDBAtlasLogs_AzureFunction.json","true" -"MongoDBAudit_CL","MongoDBAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAudit","azuresentinel","azure-sentinel-solution-mongodbaudit","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MongoDB","MongoDB","[Deprecated] MongoDB Audit","MongoDB data connector provides the capability to ingest [MongoDBAudit](https://www.mongodb.com/) into Microsoft Sentinel. 
Refer to [MongoDB documentation](https://www.mongodb.com/docs/manual/tutorial/getting-started/) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAudit/Data%20Connectors/Connector_MongoDBAudit.json","true" -"MorphisecAlerts_CL","Morphisec","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Morphisec","morphisec","morphisec_utpp_mss","2022-05-05","","","Morphisec","Partner","https://support.morphisec.com/support/home","","domains","MorphisecCCF","Morphisec","Morphisec API Data Connector (via Codeless Connector Framework)","The [Morphisec](https://www.morphisec.com/) solution for Microsoft Sentinel enables you to seamlessly ingest security alerts directly from the Morphisec API. By leveraging Morphisec's proactive breach prevention and moving target defense capabilities, this integration enriches your security operations with high-fidelity, low-noise alerts on evasive threats.
This solution provides more than just data ingestion; it equips your security team with a full suite of ready-to-use content, including: Data Connector, ASIM Parser, Analytic Rule Templates and Workbook.
With this solution, you can empower your SOC to leverage Morphisec's powerful threat prevention within a unified investigation and response workflow in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Morphisec/Data%20Connectors/Morphisec_CCF/Morphisec_ConnectorDefinition.json","true" -"MuleSoft_Cloudhub_CL","Mulesoft","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mulesoft","azuresentinel","azure-sentinel-solution-mulesoft","2022-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MuleSoft","MuleSoft","MuleSoft Cloudhub","The [MuleSoft Cloudhub](https://www.mulesoft.com/platform/saas/cloudhub-ipaas-cloud-based-integration) data connector provides the capability to retrieve logs from Cloudhub applications using the Cloudhub API and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mulesoft/Data%20Connectors/MuleSoft_Cloudhub_API_FunctionApp.json","true" -"","Multi Cloud Attack Coverage Essentials - Resource Abuse","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Multi%20Cloud%20Attack%20Coverage%20Essentials%20-%20Resource%20Abuse","azuresentinel","azure-sentinel-solution-multicloudattackcoverage","2023-11-22","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"","NCSC-NL NDN Cyber Threat Intelligence Sharing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NCSC-NL%20NDN%20Cyber%20Threat%20Intelligence%20Sharing","azuresentinel","azure-sentinel-solution-ncscnlndncti","2025-05-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"NGINX_CL","NGINX HTTP 
Server","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NGINX%20HTTP%20Server","azuresentinel","azure-sentinel-solution-nginx","2021-12-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NGINXHTTPServer","Nginx","[Deprecated] NGINX HTTP Server","The NGINX HTTP Server data connector provides the capability to ingest [NGINX](https://nginx.org/en/) HTTP Server events into Microsoft Sentinel. Refer to [NGINX Logs documentation](https://nginx.org/en/docs/http/ngx_http_log_module.html) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NGINX%20HTTP%20Server/Data%20Connectors/Connector_NGINX_agent.json","true" -"","NISTSP80053","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NISTSP80053","azuresentinel","azure-sentinel-solution-nistsp80053","2022-02-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"BSMmacOS_CL","NXLog BSM macOS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20BSM%20macOS","nxlogltd1589381969261","nxlog_bsm_macos_mss","2022-05-02","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogBSMmacOS","NXLog","NXLog BSM macOS","The [NXLog BSM](https://docs.nxlog.co/refman/current/im/bsm.html) macOS data connector uses Sun's Basic Security Module (BSM) Auditing API to read events directly from the kernel for capturing audit events on the macOS platform. 
This REST API connector can efficiently export macOS audit events to Microsoft Sentinel in real-time.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20BSM%20macOS/Data%20Connectors/NXLogBSMmacOS.json","true" -"NXLogFIM_CL","NXLog FIM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20FIM","nxlogltd1589381969261","nxlog_fim","2022-08-15","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogFIM","NXLog","NXLog FIM","The [NXLog FIM](https://docs.nxlog.co/refman/current/im/fim.html) module allows for the scanning of files and directories, reporting detected additions, changes, renames and deletions on the designated paths through calculated checksums during successive scans. This REST API connector can efficiently export the configured FIM events to Microsoft Sentinel in real time.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20FIM/Data%20Connectors/NXLogFIM.json","true" -"LinuxAudit_CL","NXLog LinuxAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20LinuxAudit","nxlogltd1589381969261","nxlog_linuxaudit_mss","2022-05-05","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogLinuxAudit","NXLog","NXLog LinuxAudit","The [NXLog LinuxAudit](https://docs.nxlog.co/refman/current/im/linuxaudit.html) data connector supports custom audit rules and collects logs without auditd or any other user-space software. IP addresses and group/user IDs are resolved to their respective names making [Linux audit](https://docs.nxlog.co/userguide/integrate/linux-audit.html) logs more intelligible to security analysts. 
This REST API connector can efficiently export Linux security events to Microsoft Sentinel in real-time.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20LinuxAudit/Data%20Connectors/NXLogLinuxAudit.json","true" -"AIX_Audit_CL","NXLogAixAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogAixAudit","nxlogltd1589381969261","nxlog_aix_audit","2022-05-05","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogAixAudit","NXLog","NXLog AIX Audit","The [NXLog AIX Audit](https://docs.nxlog.co/refman/current/im/aixaudit.html) data connector uses the AIX Audit subsystem to read events directly from the kernel for capturing audit events on the AIX platform. This REST API connector can efficiently export AIX Audit events to Microsoft Sentinel in real time.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogAixAudit/Data%20Connectors/NXLogAixAudit.json","true" -"NXLog_DNS_Server_CL","NXLogDnsLogs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogDnsLogs","nxlogltd1589381969261","nxlog_dns_logs","2022-05-24","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogDNSLogs","NXLog","NXLog DNS Logs","The NXLog DNS Logs data connector uses Event Tracing for Windows ([ETW](https://docs.microsoft.com/windows/apps/trace-processing/overview)) for collecting both Audit and Analytical DNS Server events. The [NXLog *im_etw* module](https://docs.nxlog.co/refman/current/im/etw.html) reads event tracing data directly for maximum efficiency, without the need to capture the event trace into an .etl file. 
This REST API connector can forward DNS Server events to Microsoft Sentinel in real time.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogDnsLogs/Data%20Connectors/NXLogDnsLogs.json","true" -"Nasuni","Nasuni","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Nasuni","nasunicorporation","nasuni-sentinel","2023-07-07","2023-07-07","","Nasuni","Partner","https://github.com/nasuni-labs/Azure-Sentinel","","domains","NasuniEdgeAppliance","Nasuni","[Deprecated] Nasuni Edge Appliance","The [Nasuni](https://www.nasuni.com/) connector allows you to easily connect your Nasuni Edge Appliance Notifications and file system audit logs with Microsoft Sentinel. This gives you more insight into activity within your Nasuni infrastructure and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Nasuni/Data%20Connectors/Nasuni%20Data%20Connector.json","true" -"Syslog","Nasuni","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Nasuni","nasunicorporation","nasuni-sentinel","2023-07-07","2023-07-07","","Nasuni","Partner","https://github.com/nasuni-labs/Azure-Sentinel","","domains","NasuniEdgeAppliance","Nasuni","[Deprecated] Nasuni Edge Appliance","The [Nasuni](https://www.nasuni.com/) connector allows you to easily connect your Nasuni Edge Appliance Notifications and file system audit logs with Microsoft Sentinel. 
This gives you more insight into activity within your Nasuni infrastructure and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Nasuni/Data%20Connectors/Nasuni%20Data%20Connector.json","true" -"Netclean_Incidents_CL","NetClean ProActive","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NetClean%20ProActive","netcleantechnologiesab1651557549734","azure-sentinel-solution-netclean-proactive","2022-06-30","","","NetClean","Partner","https://www.netclean.com/contact","","domains","Netclean_ProActive_Incidents","NetClean Technologies","Netclean ProActive Incidents","This connector uses the Netclean Webhook (required) and Logic Apps to push data into Microsoft Sentinel Log Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NetClean%20ProActive/Data%20Connectors/Connector_NetClean.json","true" -"Netskope_CL","Netskope","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskope","netskope","netskope_mss","2022-05-05","","","Netskope","Partner","https://www.netskope.com/services#support","","domains","Netskope","Netskope","Netskope","The [Netskope Cloud Security Platform](https://www.netskope.com/platform) connector provides the capability to ingest Netskope logs and events into Microsoft Sentinel. 
The connector provides visibility into Netskope Platform Events and Alerts in Microsoft Sentinel to improve monitoring and investigation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskope/Data%20Connectors/Netskope/Netskope_API_FunctionApp.json","true" -"NetskopeAlerts_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"NetskopeEventsApplication_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"NetskopeEventsAudit_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" 
-"NetskopeEventsConnection_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"NetskopeEventsDLP_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"NetskopeEventsEndpoint_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"NetskopeEventsInfrastructure_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and 
Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"NetskopeEventsNetwork_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"NetskopeEventsPage_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"Netskope_WebTx_metrics_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertscompromisedcredentialdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertsctepdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertsdlpdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertsmalsitedata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertsmalwaredata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertspolicydata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertsquarantinedata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertsremediationdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertssecurityassessmentdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertsubadata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"eventsapplicationdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"eventsauditdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"eventsconnectiondata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"eventsincidentdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"eventsnetworkdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"eventspagedata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"NetskopeWebtxData_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeWebTransactionsDataConnector","Netskope","Netskope Web Transactions Data Connector","The [Netskope Web Transactions](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/) data connector provides the functionality of a docker image to pull the Netskope Web Transactions data from google pubsublite, process the data and ingest the processed data to Log Analytics. As part of this data connector two tables will be formed in Log Analytics, one for Web Transactions data and other for errors encountered during execution.


For more details related to Web Transactions refer to the below documentation:
1. Netskope Web Transactions documentation:
> https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeWebTransactionsDataConnector/Netskope_WebTransactions.json","true" -"NetskopeWebtxErrors_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeWebTransactionsDataConnector","Netskope","Netskope Web Transactions Data Connector","The [Netskope Web Transactions](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/) data connector provides the functionality of a docker image to pull the Netskope Web Transactions data from google pubsublite, process the data and ingest the processed data to Log Analytics. As part of this data connector two tables will be formed in Log Analytics, one for Web Transactions data and other for errors encountered during execution.


For more details related to Web Transactions refer to the below documentation:
1. Netskope Web Transactions documentation:
> https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/
","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeWebTransactionsDataConnector/Netskope_WebTransactions.json","true" -"","Network Session Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Network%20Session%20Essentials","azuresentinel","azure-sentinel-solution-networksession","2022-11-11","2022-11-11","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"","Network Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Network%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-networkthreatdetection","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"CommonSecurityLog","Netwrix Auditor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor","azuresentinel","azure-sentinel-solution-netwrixauditor","2022-06-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Netwrix","Netwrix","[Deprecated] Netwrix Auditor via Legacy Agent","Netwrix Auditor data connector provides the capability to ingest [Netwrix Auditor (formerly Stealthbits Privileged Activity Manager)](https://www.netwrix.com/auditor.html) events into Microsoft Sentinel. 
Refer to [Netwrix documentation](https://helpcenter.netwrix.com/) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor/Data%20Connectors/Connector_NetwrixAuditor.json","true" -"CommonSecurityLog","Netwrix Auditor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor","azuresentinel","azure-sentinel-solution-netwrixauditor","2022-06-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NetwrixAma","Netwrix","[Deprecated] Netwrix Auditor via AMA","Netwrix Auditor data connector provides the capability to ingest [Netwrix Auditor (formerly Stealthbits Privileged Activity Manager)](https://www.netwrix.com/auditor.html) events into Microsoft Sentinel. Refer to [Netwrix documentation](https://helpcenter.netwrix.com/) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor/Data%20Connectors/template_NetwrixAuditorAMA.json","true" -"","Neustar IP GeoPoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Neustar%20IP%20GeoPoint","azuresentinel","azure-sentinel-solution-neustaripgeopoint","2022-09-30","2022-09-30","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"NonameAPISecurityAlert_CL","NonameSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NonameSecurity","nonamegate","nonamesecurity_sentinelsolution","2022-12-01","","","Noname Security","Partner","https://nonamesecurity.com/","","domains","NonameSecurityMicrosoftSentinel","Noname Security","Noname Security for Microsoft Sentinel","Noname Security solution to POST data into a Microsoft Sentinel SIEM workspace via the Azure Monitor REST API","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NonameSecurity/Data%20Connectors/Connector_RESTAPI_NonameSecurity.json","true" 
-"NordPassEventLogs_CL","NordPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NordPass","nordsecurityinc","azure-sentinel-solution-nordpass","2025-04-22","","","NordPass","Partner","https://support.nordpass.com/","","domains","NordPass","NordPass","NordPass","Integrating NordPass with Microsoft Sentinel SIEM via the API will allow you to automatically transfer Activity Log data from NordPass to Microsoft Sentinel and get real-time insights, such as item activity, all login attempts, and security notifications.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NordPass/Data%20Connectors/NordPass_API_FunctionApp.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NordPass/Data%20Connectors/deployment/NordPass_data_connector.json","false" -"CommonSecurityLog","NozomiNetworks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks","azuresentinel","azure-sentinel-solution-nozominetworks","2022-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NozomiNetworksN2OS","Nozomi Networks","[Deprecated] Nozomi Networks N2OS via Legacy Agent","The [Nozomi Networks](https://www.nozominetworks.com/) data connector provides the capability to ingest Nozomi Networks Events into Microsoft Sentinel. 
Refer to the Nozomi Networks [PDF documentation](https://www.nozominetworks.com/resources/data-sheets-brochures-learning-guides/) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks/Data%20Connectors/NozomiNetworksN2OS.json","true" -"CommonSecurityLog","NozomiNetworks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks","azuresentinel","azure-sentinel-solution-nozominetworks","2022-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NozomiNetworksN2OSAma","Nozomi Networks","[Deprecated] Nozomi Networks N2OS via AMA","The [Nozomi Networks](https://www.nozominetworks.com/) data connector provides the capability to ingest Nozomi Networks Events into Microsoft Sentinel. Refer to the Nozomi Networks [PDF documentation](https://www.nozominetworks.com/resources/data-sheets-brochures-learning-guides/) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks/Data%20Connectors/template_NozomiNetworksN2OSAMA.json","true" -"CommonSecurityLog","OSSEC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC","azuresentinel","azure-sentinel-solution-ossec","2022-05-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","OSSEC","OSSEC","[Deprecated] OSSEC via Legacy Agent","OSSEC data connector provides the capability to ingest [OSSEC](https://www.ossec.net/) events into Microsoft Sentinel. 
Refer to [OSSEC documentation](https://www.ossec.net/docs) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC/Data%20Connectors/Connector_CEF_OSSEC.json","true" -"CommonSecurityLog","OSSEC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC","azuresentinel","azure-sentinel-solution-ossec","2022-05-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","OSSECAma","OSSEC","[Deprecated] OSSEC via AMA","OSSEC data connector provides the capability to ingest [OSSEC](https://www.ossec.net/) events into Microsoft Sentinel. Refer to [OSSEC documentation](https://www.ossec.net/docs) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC/Data%20Connectors/template_OSSECAMA.json","true" -"ObsidianActivity_CL","Obsidian Datasharing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing","391c3d87-edc8-4f72-a719-825c022b8eb4","azure-sentinel-solution-obsidian-activity-threat","2024-01-01","","","Obsidian Security","Partner","https://obsidiansecurity.com/contact","","domains","ObsidianDatasharing","Obsidian Security","Obsidian Datasharing Connector","The Obsidian Datasharing connector provides the capability to read raw event data from Obsidian Datasharing in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing/Data%20Connectors/ObsidianDatasharing_CCP/ObsidianDatasharing_ConnectorDefinition.json","true" -"ObsidianThreat_CL","Obsidian Datasharing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing","391c3d87-edc8-4f72-a719-825c022b8eb4","azure-sentinel-solution-obsidian-activity-threat","2024-01-01","","","Obsidian Security","Partner","https://obsidiansecurity.com/contact","","domains","ObsidianDatasharing","Obsidian Security","Obsidian Datasharing Connector","The Obsidian Datasharing connector provides the capability to read raw 
event data from Obsidian Datasharing in Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing/Data%20Connectors/ObsidianDatasharing_CCP/ObsidianDatasharing_ConnectorDefinition.json","true" -"Okta_CL","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSO","Okta","Okta Single Sign-On","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) connector provides the capability to ingest audit and event logs from the Okta API into Microsoft Sentinel. The connector provides visibility into these log types in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaSingleSign-On/Connector_REST_API_FunctionApp_Okta.json","true" -"OktaNativePoller_CL","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSO_Polling","Okta","Okta Single Sign-On (Polling CCP)","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) connector provides the capability to ingest audit and event logs from the Okta API into Microsoft entinel. 
The connector provides visibility into these log types in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnector/azuredeploy_Okta_native_poller_connector.json","true" -"OktaV2_CL","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSOv2","Microsoft","Okta Single Sign-On","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) data connector provides the capability to ingest audit and event logs from the Okta Sysem Log API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform and uses the Okta System Log API to fetch the events. 
The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnectorV2/OktaSSOv2_DataConnectorDefinition.json","true" -"Okta_CL","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSOv2","Microsoft","Okta Single Sign-On","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) data connector provides the capability to ingest audit and event logs from the Okta Sysem Log API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform and uses the Okta System Log API to fetch the events. 
The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnectorV2/OktaSSOv2_DataConnectorDefinition.json","true" -"signIns","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSOv2","Microsoft","Okta Single Sign-On (Preview)","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) data connector provides the capability to ingest audit and event logs from the Okta Sysem Log API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform and uses the Okta System Log API to fetch the events. 
The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnectorV2/azuredeploy_Okta_native_poller_connector_v2.json","true" -"Onapsis_Defend_CL","Onapsis Defend","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend","onapsis","azure-sentinel-solution-onapsis-defend","2025-07-17","2025-07-17","","Onapsis","Partner","https://onapsis.com/support/","","domains","Onapsis","Onapsis Platform","Onapsis Defend Integration","Onapsis Defend Integration is aimed at forwarding alerts and logs collected and detected by Onapsis Platform into Microsoft Sentinel SIEM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend/Data%20Connectors/Onapsis.json","true" -"Onapsis_Defend_CL","Onapsis Defend","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend","onapsis","azure-sentinel-solution-onapsis-defend","2025-07-17","2025-07-17","","Onapsis","Partner","https://onapsis.com/support/","","domains","Onapsis","Onapsis SE","Onapsis Defend: Integrate Unmatched SAP Threat Detection & Intel with Microsoft Sentinel","Empower security teams with deep visibility into unique exploit, zero-day, and threat actor activity; suspicious user or insider behavior; sensitive data downloads; security control violations; and more - all enriched by the SAP experts at Onapsis.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend/Data%20Connectors/Onapsis_PUSH_CCP/Onapsis_connectorDefinition.json","true" -"CommonSecurityLog","Onapsis 
Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Platform","onapsis","onapsis_mss","2022-05-11","","","Onapsis","Partner","https://onapsis.com/company/contact-us","","domains","OnapsisPlatform","Onapsis","[Deprecated] Onapsis Platform","The Onapsis Connector allows you to export the alarms triggered in the Onapsis Platform into Microsoft Sentinel in real-time. This gives you the ability to monitor the activity on your SAP systems, identify incidents and respond to them quickly.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Platform/Data%20Connectors/OnapsisPlatform.json","true" -"CommonSecurityLog","OneIdentity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneIdentity","quest","oneidentity_mss","2022-05-02","","","One Identity","Partner","https://support.oneidentity.com/","","domains","OneIdentity","One Identity LLC.","One Identity Safeguard","The One Identity Safeguard (CEF) Sentinel data connector enhances the standard Common Event Format (CEF) connector with Safeguard for Privileged Sessions-specific dashboards. Use this connector to easily start utilizing the events generated by your device for visualization, alerts, investigations and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneIdentity/Data%20Connectors/OneIdentity.JSON","true" -"OneLoginEventsV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLogin","OneLogin","[DEPRECATED] OneLogin IAM Platform","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through Webhooks. 
The OneLogin Event Webhook API which is also known as the Event Broadcaster will send batches of events in near real-time to an endpoint that you specify. When a change occurs in the OneLogin, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.onelogin.com/api-docs/1/events/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLogin_Webhooks_FunctionApp.json","true" -"OneLoginUsersV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLogin","OneLogin","[DEPRECATED] OneLogin IAM Platform","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through Webhooks. The OneLogin Event Webhook API which is also known as the Event Broadcaster will send batches of events in near real-time to an endpoint that you specify. When a change occurs in the OneLogin, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.onelogin.com/api-docs/1/events/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLogin_Webhooks_FunctionApp.json","true" -"OneLogin_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLogin","OneLogin","[DEPRECATED] OneLogin IAM Platform","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through Webhooks. The OneLogin Event Webhook API which is also known as the Event Broadcaster will send batches of events in near real-time to an endpoint that you specify. When a change occurs in the OneLogin, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.onelogin.com/api-docs/1/events/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLogin_Webhooks_FunctionApp.json","true" -"OneLoginEventsV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLoginIAMLogsCCPDefinition","Microsoft","OneLogin IAM Platform (via Codeless Connector Framework)","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through REST API by using OneLogin [Events API](https://developers.onelogin.com/api-docs/1/events/get-events) and OneLogin [Users API](https://developers.onelogin.com/api-docs/1/users/get-users). The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLoginIAMLogs_ccp/OneLoginIAMLogs_ConnectorDefinition.json","true" -"OneLoginUsersV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLoginIAMLogsCCPDefinition","Microsoft","OneLogin IAM Platform (via Codeless Connector Framework)","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through REST API by using OneLogin [Events API](https://developers.onelogin.com/api-docs/1/events/get-events) and OneLogin [Users API](https://developers.onelogin.com/api-docs/1/users/get-users). 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLoginIAMLogs_ccp/OneLoginIAMLogs_ConnectorDefinition.json","true" -"OneTrustMetadataV3_CL","OneTrust","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneTrust","onetrustllc1594047340198","azure-sentinel-solution-onetrust","2025-10-24","2025-10-24","","OneTrust, LLC","Partner","https://www.onetrust.com/support/","","domains","OnetrustPush","OneTrust","OneTrust","The OneTrust connector for Microsoft Sentinel provides the capability to have near real time visibility into where sensitive data has been located or remediated across across Google Cloud and other OneTrust supported data sources.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneTrust/Data%20Connectors/OneTrustLogs_CCF/OneTrustLogs_connectorDefinition.json","true" -"","Open Systems","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems","opensystemsag1582030008223","azure-sentinel-solution-osag","2025-05-12","","","Open Systems","Partner","https://www.open-systems.com/support","","domains","","","","","","false" -"","OpenCTI","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OpenCTI","azuresentinel","azure-sentinel-solution-opencti","2022-09-22","2022-09-22","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"Syslog","OpenVPN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OpenVPN","azuresentinel","azure-sentinel-solution-openvpn","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OpenVPN","OpenVPN","[Deprecated] OpenVPN Server","The [OpenVPN](https://github.com/OpenVPN) data connector provides the capability to ingest OpenVPN Server logs into 
Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OpenVPN/Data%20Connectors/OpenVPN_Syslog.json","true" -"OCI_LogsV2_CL","Oracle Cloud Infrastructure","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure","azuresentinel","azure-sentinel-solution-ocilogs","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OCI-Connector-CCP-Definition","Microsoft","Oracle Cloud Infrastructure (via Codeless Connector Framework)","The Oracle Cloud Infrastructure (OCI) data connector provides the capability to ingest OCI Logs from [OCI Stream](https://docs.oracle.com/iaas/Content/Streaming/Concepts/streamingoverview.htm) into Microsoft Sentinel using the [OCI Streaming REST API](https://docs.oracle.com/iaas/api/#/streaming/streaming/20180418).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure/Data%20Connectors/Oracle_Cloud_Infrastructure_CCP/OCI_DataConnector_DataConnectorDefinition.json","true" -"OCI_Logs_CL","Oracle Cloud Infrastructure","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure","azuresentinel","azure-sentinel-solution-ocilogs","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OracleCloudInfrastructureLogsConnector","Oracle","[DEPRECATED] Oracle Cloud Infrastructure","The Oracle Cloud Infrastructure (OCI) data connector provides the capability to ingest OCI Logs from [OCI Stream](https://docs.oracle.com/iaas/Content/Streaming/Concepts/streamingoverview.htm) into Microsoft Sentinel using the [OCI Streaming REST API](https://docs.oracle.com/iaas/api/#/streaming/streaming/20180418).

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure/Data%20Connectors/OCI_logs_API_FunctionApp.json","true" -"Syslog","OracleDatabaseAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleDatabaseAudit","azuresentinel","azure-sentinel-solution-oracledbaudit","2021-11-05","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OracleDatabaseAudit","Oracle","[Deprecated] Oracle Database Audit","The Oracle DB Audit data connector provides the capability to ingest [Oracle Database](https://www.oracle.com/database/technologies/) audit events into Microsoft Sentinel through the syslog. Refer to [documentation](https://docs.oracle.com/en/database/oracle/oracle-database/21/dbseg/introduction-to-auditing.html#GUID-94381464-53A3-421B-8F13-BD171C867405) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleDatabaseAudit/Data%20Connectors/Connector_OracleDatabaseAudit.json","true" -"OracleWebLogicServer_CL","OracleWebLogicServer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleWebLogicServer","azuresentinel","azure-sentinel-solution-oracleweblogicserver","2022-01-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OracleWebLogicServer","Oracle","[Deprecated] Oracle WebLogic Server","OracleWebLogicServer data connector provides the capability to ingest [OracleWebLogicServer](https://docs.oracle.com/en/middleware/standalone/weblogic-server/index.html) events into Microsoft Sentinel. 
Refer to [OracleWebLogicServer documentation](https://docs.oracle.com/en/middleware/standalone/weblogic-server/14.1.1.0/index.html) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleWebLogicServer/Data%20Connectors/Connector_OracleWebLogicServer_agent.json","true" -"OrcaAlerts_CL","Orca Security Alerts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Orca%20Security%20Alerts","orcasecurityinc1621870991703","orca_security_alerts_mss","2022-05-10","","","Orca Security","Partner","https://orca.security/about/contact/","","domains","OrcaSecurityAlerts","Orca Security","Orca Security Alerts","The Orca Security Alerts connector allows you to easily export Alerts logs to Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Orca%20Security%20Alerts/Data%20Connectors/OrcaSecurityAlerts.json","true" -"","PCI DSS Compliance","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PCI%20DSS%20Compliance","azuresentinel","azure-sentinel-solution-pcidsscompliance","2022-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"","PDNS Block Data Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PDNS%20Block%20Data%20Connector","azuresentinel","azure-sentinel-solution-pdnsblockdataconnector","2023-03-31","","","Nominet PDNS Support","Partner","https://www.protectivedns.service.ncsc.gov.uk/pdns","","domains","","","","","","false" -"CommonSecurityLog","Palo Alto - XDR (Cortex)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20-%20XDR%20%28Cortex%29","","","","","","","","","","","PaloAltoNetworksCortex","Palo Alto Networks","Palo Alto Networks Cortex XDR","The Palo Alto Networks Cortex XDR connector gives you an easy way to connect to your Cortex XDR logs with Microsoft Sentinel. This increases the visibility of your endpoint security. 
It will give you better ability to monitor your resources by creating custom Workbooks, analytics rules, Incident investigation, and evidence gathering.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20-%20XDR%20%28Cortex%29/Data%20Connectors/Connector_PaloAlto_XDR_CEF.json","true" -"PaloAltoCortexXDR_Alerts_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" -"PaloAltoCortexXDR_Audit_Agent_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows 
ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" -"PaloAltoCortexXDR_Audit_Management_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" -"PaloAltoCortexXDR_Endpoints_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" -"PaloAltoCortexXDR_Incidents_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" -"CortexXpanseAlerts_CL","Palo Alto Cortex Xpanse CCF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20Xpanse%20CCF","azuresentinel","azure-sentinel-solution-cortexxpanse","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoExpanseCCPDefinition","Microsoft","Palo Alto Cortex Xpanse (via Codeless Connector Framework)","The Palo Alto Cortex Xpanse data connector ingests alerts data into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20Xpanse%20CCF/Data%20Connectors/CortexXpanse_ccp/CortexXpanse_ConnectorDefinition.json","true" -"PrismaCloudCompute_CL","Palo Alto Prisma Cloud CWPP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP","azuresentinel","azure-sentinel-solution-prismacloudcompute","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PaloAltoPrismaCloudCWPP","Microsoft","Palo Alto Prisma Cloud CWPP (using REST API)","The [Palo Alto Prisma Cloud CWPP](https://prisma.pan.dev/api/cloud/cwpp/audits/#operation/get-audits-incidents) data connector allows you to connect to your Palo Alto Prisma Cloud CWPP instance and ingesting alerts into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Platform and uses the Prisma Cloud API to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/PaloAltoPrismaCloudCWPP_ccp/connectorDefinition.json","true" -"PrismaCloudCompute_CL","Palo Alto Prisma Cloud CWPP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP","azuresentinel","azure-sentinel-solution-prismacloudcompute","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PrismaCloudComputeNativePoller","Microsoft","Palo Alto Prisma Cloud CWPP (using REST API)","The [Palo Alto Prisma Cloud CWPP](https://prisma.pan.dev/api/cloud/cwpp/audits/#operation/get-audits-incidents) data connector allows you to connect to your Prisma Cloud CWPP instance and ingesting alerts into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel’s Codeless Connector Platform and uses the Prisma Cloud API to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/PrismaCloudCompute_CLV2.json","true" -"CommonSecurityLog","PaloAlto-PAN-OS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS","azuresentinel","azure-sentinel-solution-paloaltopanos","2021-08-09","2021-09-20","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoNetworks","Palo Alto Networks","[Deprecated] Palo Alto Networks (Firewall) via Legacy Agent","The Palo Alto Networks firewall connector allows you to easily connect your Palo Alto Networks logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS/Data%20Connectors/PaloAltoNetworks.json","true" -"CommonSecurityLog","PaloAlto-PAN-OS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS","azuresentinel","azure-sentinel-solution-paloaltopanos","2021-08-09","2021-09-20","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoNetworksAma","Palo Alto Networks","[Deprecated] Palo Alto Networks (Firewall) via AMA","The Palo Alto Networks firewall connector allows you to easily connect your Palo Alto Networks logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS/Data%20Connectors/template_PaloAltoNetworksAMA.json","true" -"CommonSecurityLog","PaloAltoCDL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL","azuresentinel","azure-sentinel-solution-paloaltocdl","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoCDL","Palo Alto Networks","[Deprecated] Palo Alto Networks Cortex Data Lake (CDL) via Legacy Agent","The [Palo Alto Networks CDL](https://www.paloaltonetworks.com/cortex/cortex-data-lake) data connector provides the capability to ingest [CDL logs](https://docs.paloaltonetworks.com/strata-logging-service/log-reference/log-forwarding-schema-overview) into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL/Data%20Connectors/Connector_PaloAlto_CDL_CEF.json","true" -"CommonSecurityLog","PaloAltoCDL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL","azuresentinel","azure-sentinel-solution-paloaltocdl","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoCDLAma","Palo Alto Networks","[Deprecated] Palo Alto Networks Cortex Data Lake (CDL) via AMA","The [Palo Alto Networks CDL](https://www.paloaltonetworks.com/cortex/cortex-data-lake) data connector provides the capability to ingest [CDL logs](https://docs.paloaltonetworks.com/strata-logging-service/log-reference/log-forwarding-schema-overview) into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL/Data%20Connectors/template_PaloAlto_CDLAMA.json","true" 
-"PaloAltoPrismaCloudAlert_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloud","Palo Alto","[DEPRECATED] Palo Alto Prisma Cloud CSPM","The Palo Alto Prisma Cloud CSPM data connector provides the capability to ingest [Prisma Cloud CSPM alerts](https://prisma.pan.dev/api/cloud/cspm/alerts#operation/get-alerts) and [audit logs](https://prisma.pan.dev/api/cloud/cspm/audit-logs#operation/rl-audit-logs) into Microsoft sentinel using the Prisma Cloud CSPM API. Refer to [Prisma Cloud CSPM API documentation](https://prisma.pan.dev/api/cloud/cspm) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloud_API_FunctionApp.json","true" -"PaloAltoPrismaCloudAudit_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloud","Palo Alto","[DEPRECATED] Palo Alto Prisma Cloud CSPM","The Palo Alto Prisma Cloud CSPM data connector provides the capability to ingest [Prisma Cloud CSPM alerts](https://prisma.pan.dev/api/cloud/cspm/alerts#operation/get-alerts) and [audit logs](https://prisma.pan.dev/api/cloud/cspm/audit-logs#operation/rl-audit-logs) into Microsoft sentinel using the Prisma Cloud CSPM API. Refer to [Prisma Cloud CSPM API documentation](https://prisma.pan.dev/api/cloud/cspm) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloud_API_FunctionApp.json","true" -"PaloAltoPrismaCloudAlertV2_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloudCSPMCCPDefinition","Microsoft","Palo Alto Prisma Cloud CSPM (via Codeless Connector Framework)","The Palo Alto Prisma Cloud CSPM data connector allows you to connect to your Palo Alto Prisma Cloud CSPM instance and ingesting Alerts (https://pan.dev/prisma-cloud/api/cspm/alerts/) & Audit Logs(https://pan.dev/prisma-cloud/api/cspm/audit-logs/) into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloudCSPMLog_CCF/PaloAltoPrismaCloudCSPMLog_ConnectorDefinition.json","true" -"PaloAltoPrismaCloudAuditV2_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloudCSPMCCPDefinition","Microsoft","Palo Alto Prisma Cloud CSPM (via Codeless Connector Framework)","The Palo Alto Prisma Cloud CSPM data connector allows you to connect to your Palo Alto Prisma Cloud CSPM instance and ingesting Alerts (https://pan.dev/prisma-cloud/api/cspm/alerts/) & Audit Logs(https://pan.dev/prisma-cloud/api/cspm/audit-logs/) into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloudCSPMLog_CCF/PaloAltoPrismaCloudCSPMLog_ConnectorDefinition.json","true" 
-"ABAPAuditLog","Pathlock_TDnR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR","pathlockinc1631410274035","pathlock_tdnr","2022-02-17","","","Pathlock Inc.","Partner","https://pathlock.com/support/","","domains,verticals","Pathlock_TDnR","Pathlock Inc.","Pathlock Inc.: Threat Detection and Response for SAP","The [Pathlock Threat Detection and Response (TD&R)](https://pathlock.com/products/cybersecurity-application-controls/) integration with **Microsoft Sentinel Solution for SAP** delivers unified, real-time visibility into SAP security events, enabling organizations to detect and act on threats across all SAP landscapes. This out-of-the-box integration allows Security Operations Centers (SOCs) to correlate SAP-specific alerts with enterprise-wide telemetry, creating actionable intelligence that connects IT security with business processes.

Pathlock’s connector is purpose-built for SAP and forwards only **security-relevant events by default**, minimizing data volume and noise while maintaining the flexibility to forward all log sources when needed. Each event is enriched with **business process context**, allowing Microsoft Sentinel Solution for SAP analytics to distinguish operational patterns from real threats and to prioritize what truly matters.

This precision-driven approach helps security teams drastically reduce false positives, focus investigations, and accelerate **mean time to detect (MTTD)** and **mean time to respond (MTTR)**. Pathlock’s library consists of more than 1,500 SAP-specific detection signatures across 70+ log sources, the solution uncovers complex attack behaviors, configuration weaknesses, and access anomalies.

By combining business-context intelligence with advanced analytics, Pathlock enables enterprises to strengthen detection accuracy, streamline response actions, and maintain continuous control across their SAP environments—without adding complexity or redundant monitoring layers.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR/Data%20Connectors/Pathlock_TDnR_PUSH_CCP/Pathlock_TDnR_connectorDefinition.json","true" -"Pathlock_TDnR_CL","Pathlock_TDnR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR","pathlockinc1631410274035","pathlock_tdnr","2022-02-17","","","Pathlock Inc.","Partner","https://pathlock.com/support/","","domains,verticals","Pathlock_TDnR","Pathlock Inc.","Pathlock Threat Detection and Response Integration","Pathlock Threat Detection and Response enables seamless forwarding of security alerts and logs detected and collected by the Pathlock Platform into Microsoft Sentinel Solution for SAP.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR/Data%20Connectors/Pathlock_TDnR.json","true" -"Perimeter81_CL","Perimeter 81","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Perimeter%2081","perimeter811605117499319","perimeter_81___mss","2022-05-06","","","Perimeter 81","Partner","https://support.perimeter81.com/docs","","domains","Perimeter81ActivityLogs","Perimeter 81","Perimeter 81 Activity Logs","The Perimeter 81 Activity Logs connector allows you to easily connect your Perimeter 81 activity logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Perimeter%2081/Data%20Connectors/Perimeter81ActivityLogs.json","true" -"Phosphorus_CL","Phosphorus","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Phosphorus","4043","microsoft-sentinel-solution-phosphorus","2024-08-13","2024-08-13","","Phosphorus 
Inc.","Partner","https://phosphorus.io","","domains","Phosphorus_Polling","Phosphorus Inc.","Phosphorus Devices","The Phosphorus Device Connector provides the capability to Phosphorus to ingest device data logs into Microsoft Sentinel through the Phosphorus REST API. The Connector provides visibility into the devices enrolled in Phosphorus. This Data Connector pulls devices information along with its corresponding alerts.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Phosphorus/Data%20Connectors/PhosphorusDataConnector.json","true" -"CommonSecurityLog","PingFederate","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate","azuresentinel","azure-sentinel-solution-pingfederate","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PingFederate","Ping Identity","[Deprecated] PingFederate via Legacy Agent","The [PingFederate](https://www.pingidentity.com/en/software/pingfederate.html) data connector provides the capability to ingest [PingFederate events](https://docs.pingidentity.com/bundle/pingfederate-102/page/lly1564002980532.html) into Microsoft Sentinel. 
Refer to [PingFederate documentation](https://docs.pingidentity.com/bundle/pingfederate-102/page/tle1564002955874.html) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate/Data%20Connectors/Connector_CEF_PingFederate.json","true" -"CommonSecurityLog","PingFederate","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate","azuresentinel","azure-sentinel-solution-pingfederate","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PingFederateAma","Ping Identity","[Deprecated] PingFederate via AMA","The [PingFederate](https://www.pingidentity.com/en/software/pingfederate.html) data connector provides the capability to ingest [PingFederate events](https://docs.pingidentity.com/bundle/pingfederate-102/page/lly1564002980532.html) into Microsoft Sentinel. Refer to [PingFederate documentation](https://docs.pingidentity.com/bundle/pingfederate-102/page/tle1564002955874.html) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate/Data%20Connectors/template_PingFederateAMA.json","true" -"PingOne_AuditActivitiesV2_CL","PingOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingOne","azuresentinel","azure-sentinel-pingone","2025-04-20","2025-04-20","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PingOneAuditLogsCCPDefinition","Microsoft","Ping One (via Codeless Connector Framework)","This connector ingests **audit activity logs** from the PingOne Identity platform into Microsoft Sentinel using a Codeless Connector Framework.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingOne/Data%20Connectors/PingOneAuditLogs_ccp/PingOneAuditLogs_DataConnectorDefinition.json","true" 
-"PostgreSQL_CL","PostgreSQL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PostgreSQL","azuresentinel","azure-sentinel-solution-postgresql","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PostgreSQL","PostgreSQL","[Deprecated] PostgreSQL Events","PostgreSQL data connector provides the capability to ingest [PostgreSQL](https://www.postgresql.org/) events into Microsoft Sentinel. Refer to [PostgreSQL documentation](https://www.postgresql.org/docs/current/index.html) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PostgreSQL/Data%20Connectors/Connector_PostgreSQL.json","true" -"","Power Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Power%20Platform","","","","","","","","","","","","","","","","false" -"prancer_CL","Prancer PenSuiteAI Integration","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Prancer%20PenSuiteAI%20Integration","prancerenterprise1600813133757","microsoft-sentinel-solution-prancer","2023-08-02","","","Prancer PenSuiteAI Integration","Partner","https://www.prancer.io","","domains","PrancerLogData","Prancer","Prancer Data Connector","The Prancer Data Connector has provides the capability to ingest Prancer (CSPM)[https://docs.prancer.io/web/CSPM/] and [PAC](https://docs.prancer.io/web/PAC/introduction/) data to process through Microsoft Sentinel. 
Refer to [Prancer Documentation](https://docs.prancer.io/web) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Prancer%20PenSuiteAI%20Integration/Data%20Connectors/PrancerLogData.json","true" -"ProofPointTAPClicksBlocked_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" -"ProofPointTAPClicksPermitted_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" -"ProofPointTAPMessagesBlocked_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" -"ProofPointTAPMessagesDelivered_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" -"ProofPointTAPClicksBlockedV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" -"ProofPointTAPClicksPermittedV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. 
The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" -"ProofPointTAPMessagesBlockedV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" -"ProofPointTAPMessagesDeliveredV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. 
The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" -"ProofpointPODMailLog_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointCCPDefinition","Proofpoint","Proofpoint On Demand Email Security (via Codeless Connector Platform)","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. 
The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofPointEmailSecurity_CCP/ProofpointPOD_Definaton.json","true" -"ProofpointPODMessage_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointCCPDefinition","Proofpoint","Proofpoint On Demand Email Security (via Codeless Connector Platform)","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. 
The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofPointEmailSecurity_CCP/ProofpointPOD_Definaton.json","true" -"ProofpointPODMessage_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" -"ProofpointPOD_maillog_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" -"ProofpointPOD_message_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" -"maillog_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" -"Syslog","Pulse Connect Secure","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pulse%20Connect%20Secure","azuresentinel","azure-sentinel-solution-pulseconnectsecure","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PulseConnectSecure","Pulse Secure","[Deprecated] Pulse Connect Secure","The [Pulse Connect Secure](https://www.pulsesecure.net/products/pulse-connect-secure/) connector allows you to easily connect your Pulse Connect Secure logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Pulse Connect Secure with Microsoft Sentinel provides more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pulse%20Connect%20Secure/Data%20Connectors/Connector_Syslog_PulseConnectSecure.json","true" -"","Pure Storage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pure%20Storage","purestoragemarketplaceadmin","microsoft-sentinel-solution-purestorage","2024-02-05","","","purestoragemarketplaceadmin","Partner","https://support.purestorage.com","","domains","","","","","","false" -"QualysKB_CL","Qualys VM Knowledgebase","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Qualys%20VM%20Knowledgebase","azuresentinel","azure-sentinel-solution-qualysvmknowledgebase","2022-05-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","QualysKB","Qualys","Qualys VM KnowledgeBase","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) KnowledgeBase (KB) connector provides the capability to ingest the latest vulnerability data from the Qualys KB into Microsoft Sentinel.

This data can used to correlate and enrich vulnerability detections found by the [Qualys Vulnerability Management (VM)](https://docs.microsoft.com/azure/sentinel/connect-qualys-vm) data connector.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Qualys%20VM%20Knowledgebase/Data%20Connectors/QualysKB_API_FunctionApp.json","true" -"QualysHostDetectionV3_CL","QualysVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM","azuresentinel","azure-sentinel-qualysvm","2020-12-14","2025-11-18","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","QualysVMLogsCCPDefinition","Microsoft","Qualys Vulnerability Management (via Codeless Connector Framework)","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) data connector provides the capability to ingest vulnerability host detection data into Microsoft Sentinel through the Qualys API. The connector provides visibility into host detection data from vulerability scans.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM/Data%20Connectors/QualysVMHostLogs_ccp/QualysVMHostLogs_ConnectorDefinition.json","true" -"QualysHostDetectionV2_CL","QualysVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM","azuresentinel","azure-sentinel-qualysvm","2020-12-14","2025-11-18","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","QualysVulnerabilityManagement","Qualys","[DEPRECATED] Qualys Vulnerability Management","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) data connector provides the capability to ingest vulnerability host detection data into Microsoft Sentinel through the Qualys API. The connector provides visibility into host detection data from vulerability scans. This connector provides Microsoft Sentinel the capability to view dashboards, create custom alerts, and improve investigation

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM/Data%20Connectors/QualysVM_API_FunctionApp.json","true" -"QualysHostDetection_CL","QualysVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM","azuresentinel","azure-sentinel-qualysvm","2020-12-14","2025-11-18","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","QualysVulnerabilityManagement","Qualys","[DEPRECATED] Qualys Vulnerability Management","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) data connector provides the capability to ingest vulnerability host detection data into Microsoft Sentinel through the Qualys API. The connector provides visibility into host detection data from vulerability scans. This connector provides Microsoft Sentinel the capability to view dashboards, create custom alerts, and improve investigation

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM/Data%20Connectors/QualysVM_API_FunctionApp.json","true" -"QscoutAppEvents_CL","Quokka","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Quokka","quokka","azure-sentinel-solution-quokka","2025-10-30","","","Quokka","Partner","https://www.quokka.io/contact-us#customer-support","","domains","QscoutAppEventsCCFDefinition","Quokka","QscoutAppEventsConnector","Ingest Qscout application events into Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Quokka/Data%20Connectors/QuokkaQscoutAppEventsLogs_ccf/QuokkaQscoutAppEventsLogs_connectorDefinition.json","true" -"Syslog","RSA SecurID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSA%20SecurID","azuresentinel","azure-sentinel-solution-securid","2021-09-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","RSASecurIDAM","RSA","[Deprecated] RSA® SecurID (Authentication Manager)","The [RSA® SecurID Authentication Manager](https://www.securid.com/) data connector provides the capability to ingest [RSA® SecurID Authentication Manager events](https://community.rsa.com/t5/rsa-authentication-manager/rsa-authentication-manager-log-messages/ta-p/630160) into Microsoft Sentinel. 
Refer to [RSA® SecurID Authentication Manager documentation](https://community.rsa.com/t5/rsa-authentication-manager/getting-started-with-rsa-authentication-manager/ta-p/569582) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSA%20SecurID/Data%20Connectors/RSASecurID.json","true" -"RSAIDPlus_AdminLogs_CL","RSAIDPlus_AdminLogs_Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSAIDPlus_AdminLogs_Connector","rsasecurity1687281258544","azure-sentinel-solution-rsa_id_plus_admin_log","2025-10-14","","","RSA Support Team","Partner","https://community.rsa.com/","","domains,verticals","RSAIDPlus_AdmingLogs_Connector","RSA","RSA ID Plus Admin Logs Connector","The RSA ID Plus AdminLogs Connector provides the capability to ingest [Cloud Admin Console Audit Events](https://community.rsa.com/s/article/Cloud-Administration-Event-Log-API-5d22ba17) into Microsoft Sentinel using Cloud Admin APIs.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSAIDPlus_AdminLogs_Connector/Data%20Connectors/RSIDPlus_AdminLogs_Connector_CCP/RSAIDPlus_AdminLogs_ConnectorDefinition.json","true" -"CommonSecurityLog","Radiflow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Radiflow","radiflow","azure-sentinel-solution-radiflow","2024-06-26","","","Radiflow","Partner","https://www.radiflow.com","","domains","RadiflowIsid","Radiflow","Radiflow iSID via AMA","iSID enables non-disruptive monitoring of distributed ICS networks for changes in topology and behavior, using multiple security packages, each offering a unique capability pertaining to a specific type of network activity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Radiflow/Data%20Connectors/RadiflowIsid.json","true" -"NexposeInsightVMCloud_assets_CL","Rapid7InsightVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM","azuresentinel","azure-sentinel-solution-rapid7insightvm","2021-07-07","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","InsightVMCloudAPI","Rapid7","Rapid7 Insight Platform Vulnerability Management Reports","The [Rapid7 Insight VM](https://www.rapid7.com/products/insightvm/) Report data connector provides the capability to ingest Scan reports and vulnerability data into Microsoft Sentinel through the REST API from the Rapid7 Insight platform (Managed in the cloud). Refer to [API documentation](https://docs.rapid7.com/insight/api-overview/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM/Data%20Connectors/InsightVMCloud_API_FunctionApp.json","true" -"NexposeInsightVMCloud_vulnerabilities_CL","Rapid7InsightVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM","azuresentinel","azure-sentinel-solution-rapid7insightvm","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","InsightVMCloudAPI","Rapid7","Rapid7 Insight Platform Vulnerability Management Reports","The [Rapid7 Insight VM](https://www.rapid7.com/products/insightvm/) Report data connector provides the capability to ingest Scan reports and vulnerability data into Microsoft Sentinel through the REST API from the Rapid7 Insight platform (Managed in the cloud). Refer to [API documentation](https://docs.rapid7.com/insight/api-overview/) for more information. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM/Data%20Connectors/InsightVMCloud_API_FunctionApp.json","true" -"","Recorded Future","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Recorded%20Future","recordedfuture1605638642586","recorded_future_sentinel_solution","2021-11-01","2023-09-19","","Recorded Future Support Team","Partner","http://support.recordedfuture.com/","","domains","","","","","","false" -"","Recorded Future Identity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Recorded%20Future%20Identity","recordedfuture1605638642586","recorded_future_identity_solution","2022-09-06","2025-04-02","","Recorded Future Support Team","Partner","https://support.recordedfuture.com/","","domains","","","","","","false" -"RedCanaryDetections_CL","Red Canary","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Red%20Canary","Red Canary","microsoft-sentinel-solution-RedCanary","2022-03-04","2022-03-04","","Red Canary","Partner","https://www.redcanary.com","","domains","RedCanaryDataConnector","Red Canary","Red Canary Threat Detection","The Red Canary data connector provides the capability to ingest published Detections into Microsoft Sentinel using the Data Collector REST API.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Red%20Canary/Data%20Connectors/RedCanaryDataConnector.json","true" -"","ReversingLabs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ReversingLabs","reversinglabs1597673283347","rl_offer_content_hub_aoae","2022-08-08","2024-07-17","","ReversingLabs","Partner","https://support.reversinglabs.com/hc/en-us","","domains","","","","","","false" 
-"CommonSecurityLog","RidgeSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RidgeSecurity","ridgesecuritytechnologyinc1670890478389","microsoft-sentinel-solution-ridgesecurity","2023-10-23","2023-10-23","","RidgeSecurity","Partner","https://ridgesecurity.ai/about-us/","","domains","RidgeBotDataConnector","RidgeSecurity","[Deprecated] RIDGEBOT - data connector for Microsoft Sentinel","The RidgeBot connector lets users connect RidgeBot with Microsoft Sentinel, allowing creation of Dashboards, Workbooks, Notebooks and Alerts.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RidgeSecurity/Data%20Connectors/RidgeSecurity.json","true" -"","RiskIQ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RiskIQ","azuresentinel","azure-sentinel-solution-riskiq","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"Rubrik_Anomaly_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. 
The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" -"Rubrik_Events_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" -"Rubrik_Ransomware_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. 
The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" -"Rubrik_ThreatHunt_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" -"","SAP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP","","","","","","","","","","","","","","","","false" -"SAPBTPAuditLog_CL","SAP BTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20BTP","sentinel4sap","sap_btp_sentinel_solution","2023-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SAPBTPAuditEvents","Microsoft","SAP BTP","SAP Business Technology Platform (SAP BTP) brings together data management, analytics, artificial intelligence, application development, automation, and integration in one, unified 
environment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20BTP/Data%20Connectors/SAPBTPPollerConnector/SAPBTP_DataConnectorDefinition.json","true" -"SAPETDAlerts_CL","SAP ETD Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud","sap_jasondau","azure-sentinel-solution-sapetd","2025-02-17","2025-09-11","","SAP","Partner","https://help.sap.com/docs/SAP_ENTERPRISE_THREAT_DETECTION_CLOUD_EDITION","","domains","SAPETDAlerts","SAP","SAP Enterprise Threat Detection, cloud edition","The SAP Enterprise Threat Detection, cloud edition (ETD) data connector enables ingestion of security alerts from ETD into Microsoft Sentinel, supporting cross-correlation, alerting, and threat hunting.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud/Data%20Connectors/SAPETD_PUSH_CCP/SAPETD_connectorDefinition.json","true" -"SAPETDInvestigations_CL","SAP ETD Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud","sap_jasondau","azure-sentinel-solution-sapetd","2025-02-17","2025-09-11","","SAP","Partner","https://help.sap.com/docs/SAP_ENTERPRISE_THREAT_DETECTION_CLOUD_EDITION","","domains","SAPETDAlerts","SAP","SAP Enterprise Threat Detection, cloud edition","The SAP Enterprise Threat Detection, cloud edition (ETD) data connector enables ingestion of security alerts from ETD into Microsoft Sentinel, supporting cross-correlation, alerting, and threat hunting.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud/Data%20Connectors/SAPETD_PUSH_CCP/SAPETD_connectorDefinition.json","true" -"SAPLogServ_CL","SAP 
LogServ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20LogServ","sap_jasondau","azure-sentinel-solution-saplogserv","2025-02-17","2025-07-18","","SAP","Partner","https://community.sap.com/t5/enterprise-resource-planning-blogs-by-sap/announcing-limited-preview-of-sap-logserv-integration-with-microsoft/ba-p/13942180","","domains","SAPLogServ","SAP SE","SAP LogServ (RISE), S/4HANA Cloud private edition","SAP LogServ is an SAP Enterprise Cloud Services (ECS) service aimed at collection, storage, forwarding and access of logs. LogServ centralizes the logs from all systems, applications, and ECS services used by a registered customer.
Main Features include:
Near Realtime Log Collection: With ability to integrate into Microsoft Sentinel as SIEM solution.
LogServ complements the existing SAP application layer threat monitoring and detections in Microsoft Sentinel with the log types owned by SAP ECS as the system provider. This includes logs like: SAP Security Audit Log (AS ABAP), HANA database, AS JAVA, ICM, SAP Web Dispatcher, SAP Cloud Connector, OS, SAP Gateway, 3rd party Database, Network, DNS, Proxy, Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20LogServ/Data%20Connectors/SAPLogServ.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20LogServ/Data%20Connectors/SAPLogServ_PUSH_CCP/SAPLogServ_connectorDefinition.json","false" -"ABAPAuditLog","SAP S4 Cloud Public Edition","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20S4%20Cloud%20Public%20Edition","sap_jasondau","azure-sentinel-solution-s4hana-public","2025-09-12","","","SAP","Partner","https://api.sap.com/api/SecurityAuditLog_ODataService/overview","","domains","SAPS4PublicAlerts","SAP","SAP S/4HANA Cloud Public Edition","The SAP S/4HANA Cloud Public Edition (GROW with SAP) data connector enables ingestion of SAP's security audit log into the Microsoft Sentinel Solution for SAP, supporting cross-correlation, alerting, and threat hunting. Looking for alternative authentication mechanisms? See [here](https://github.com/Azure-Samples/Sentinel-For-SAP-Community/tree/main/integration-artifacts).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20S4%20Cloud%20Public%20Edition/Data%20Connectors/SAPS4PublicPollerConnector/SAPS4Public_connectorDefinition.json","true" -"SIGNL4_CL","SIGNL4","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4","derdack","azure-sentinel-solution-signl4","2021-12-10","2021-12-10","","Derdack","Partner","https://www.signl4.com","","domains","DerdackSIGNL4","Derdack","Derdack SIGNL4","When critical systems fail or security incidents happen, SIGNL4 bridges the ‘last mile’ to your staff, engineers, IT admins and workers in the field. 
It adds real-time mobile alerting to your services, systems, and processes in no time. SIGNL4 notifies through persistent mobile push, SMS text and voice calls with acknowledgement, tracking and escalation. Integrated duty and shift scheduling ensure the right people are alerted at the right time.

[Learn more >](https://www.signl4.com)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4/Data%20Connectors/DerdackSIGNL4.json","true" -"SecurityIncident","SIGNL4","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4","derdack","azure-sentinel-solution-signl4","2021-12-10","2021-12-10","","Derdack","Partner","https://www.signl4.com","","domains","DerdackSIGNL4","Derdack","Derdack SIGNL4","When critical systems fail or security incidents happen, SIGNL4 bridges the ‘last mile’ to your staff, engineers, IT admins and workers in the field. It adds real-time mobile alerting to your services, systems, and processes in no time. SIGNL4 notifies through persistent mobile push, SMS text and voice calls with acknowledgement, tracking and escalation. Integrated duty and shift scheduling ensure the right people are alerted at the right time.

[Learn more >](https://www.signl4.com)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4/Data%20Connectors/DerdackSIGNL4.json","true" -"SINECSecurityGuard_CL","SINEC Security Guard","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SINEC%20Security%20Guard","siemensplmsoftware","azure-sentinel-solution-ssg","2024-07-15","","","Siemens AG","Partner","https://siemens.com/sinec-security-guard","","domains,verticals","SSG","Siemens AG","SINEC Security Guard","The SINEC Security Guard solution for Microsoft Sentinel allows you to ingest security events of your industrial networks from the [SINEC Security Guard](https://siemens.com/sinec-security-guard) into Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SINEC%20Security%20Guard/Data%20Connectors/data_connector_GenericUI.json","true" -"","SOC Handbook","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC%20Handbook","microsoftsentinelcommunity","azure-sentinel-solution-sochandbook","2022-11-30","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","false" -"","SOC-Process-Framework","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC-Process-Framework","azuresentinel","azure-sentinel-solution-socprocessframework","2022-04-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"SailPointIDN_Events_CL","SailPointIdentityNow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow","sailpoint1582673310610","sentinel_offering","2021-10-26","","","SailPoint","Partner","","","domains","SailPointIdentityNow","SailPoint","SailPoint IdentityNow","The [SailPoint](https://www.sailpoint.com/) IdentityNow data connector provides the capability to ingest [SailPoint IdentityNow] search events into Microsoft Sentinel through the REST API. 
The connector provides customers the ability to extract audit information from their IdentityNow tenant. It is intended to make it even easier to bring IdentityNow user activity and governance events into Microsoft Sentinel to improve insights from your security incident and event monitoring solution.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow/Data%20Connectors/SailPoint_IdentityNow_FunctionApp.json","true" -"SailPointIDN_Triggers_CL","SailPointIdentityNow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow","sailpoint1582673310610","sentinel_offering","2021-10-26","","","SailPoint","Partner","","","domains","SailPointIdentityNow","SailPoint","SailPoint IdentityNow","The [SailPoint](https://www.sailpoint.com/) IdentityNow data connector provides the capability to ingest [SailPoint IdentityNow] search events into Microsoft Sentinel through the REST API. The connector provides customers the ability to extract audit information from their IdentityNow tenant. 
It is intended to make it even easier to bring IdentityNow user activity and governance events into Microsoft Sentinel to improve insights from your security incident and event monitoring solution.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow/Data%20Connectors/SailPoint_IdentityNow_FunctionApp.json","true" -"","SalemCyber","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SalemCyber","saleminc1627928803559","salem-cyber-ai-analyst","2023-07-21","2023-07-21","","Salem Cyber","Partner","https://www.salemcyber.com/contact","","domains","","","","","","false" -"SalesforceServiceCloudV2_CL","Salesforce Service Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud","azuresentinel","azure-sentinel-solution-salesforceservicecloud","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SalesforceServiceCloud","Salesforce","[DEPRECATED] Salesforce Service Cloud","The Salesforce Service Cloud data connector provides the capability to ingest information about your Salesforce operational events into Microsoft Sentinel through the REST API. The connector provides ability to review events in your org on an accelerated basis, get [event log files](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/event_log_file_hourly_overview.htm) in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud/Data%20Connectors/SalesforceServiceCloud_API_FunctionApp.json","true" -"SalesforceServiceCloud_CL","Salesforce Service Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud","azuresentinel","azure-sentinel-solution-salesforceservicecloud","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SalesforceServiceCloud","Salesforce","[DEPRECATED] Salesforce Service Cloud","The Salesforce Service Cloud data connector provides the capability to ingest information about your Salesforce operational events into Microsoft Sentinel through the REST API. The connector provides ability to review events in your org on an accelerated basis, get [event log files](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/event_log_file_hourly_overview.htm) in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud/Data%20Connectors/SalesforceServiceCloud_API_FunctionApp.json","true" -"SalesforceServiceCloudV2_CL","Salesforce Service Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud","azuresentinel","azure-sentinel-solution-salesforceservicecloud","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SalesforceServiceCloudCCPDefinition","Microsoft","Salesforce Service Cloud (via Codeless Connector Framework)","The Salesforce Service Cloud data connector provides the capability to ingest information about your Salesforce operational events into Microsoft Sentinel through the REST API. The connector provides ability to review events in your org on an accelerated basis, get [event log files](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/event_log_file_hourly_overview.htm) in hourly increments for recent activity.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud/Data%20Connectors/SalesforceSentinelConnector_CCP/SalesforceServiceCloud_DataConnectorDefinition.json","true" -"Samsung_Knox_Application_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules 
templates.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" -"Samsung_Knox_Audit_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" -"Samsung_Knox_Network_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" -"Samsung_Knox_Process_CL","Samsung Knox Asset 
Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" -"Samsung_Knox_System_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" -"Samsung_Knox_User_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset 
Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" -"ABAPAuditLog","SecurityBridge App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App","securitybridge1647511278080","securitybridge-sentinel-app-1","2022-02-17","","","SecurityBridge","Partner","https://securitybridge.com/contact/","","domains,verticals","SecurityBridge","SecurityBridge Group GmbH","SecurityBridge Solution for SAP","SecurityBridge enhances SAP security by integrating seamlessly with Microsoft Sentinel, enabling real-time monitoring and threat detection across SAP environments. This integration allows Security Operations Centers (SOCs) to consolidate SAP security events with other organizational data, providing a unified view of the threat landscape . Leveraging AI-powered analytics and Microsoft’s Security Copilot, SecurityBridge identifies sophisticated attack patterns and vulnerabilities within SAP applications, including ABAP code scanning and configuration assessments . The solution supports scalable deployments across complex SAP landscapes, whether on-premises, in the cloud, or hybrid environments . 
By bridging the gap between IT and SAP security teams, SecurityBridge empowers organizations to proactively detect, investigate, and respond to threats, enhancing overall security posture.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App/Data%20Connectors/SecurityBridge_PUSH_CCP/SecurityBridge_connectorDefinition.json","true" -"SecurityBridgeLogs_CL","SecurityBridge App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App","securitybridge1647511278080","securitybridge-sentinel-app-1","2022-02-17","","","SecurityBridge","Partner","https://securitybridge.com/contact/","","domains,verticals","SecurityBridgeSAP","SecurityBridge","SecurityBridge Threat Detection for SAP","SecurityBridge is the first and only holistic, natively integrated security platform, addressing all aspects needed to protect organizations running SAP from internal and external threats against their core business applications. The SecurityBridge platform is an SAP-certified add-on, used by organizations around the globe, and addresses the clients’ need for advanced cybersecurity, real-time monitoring, compliance, code security, and patching to protect against internal and external threats.This Microsoft Sentinel Solution allows you to integrate SecurityBridge Threat Detection events from all your on-premise and cloud based SAP instances into your security monitoring.Use this Microsoft Sentinel Solution to receive normalized and speaking security events, pre-built dashboards and out-of-the-box templates for your SAP security monitoring.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App/Data%20Connectors/Connector_SecurityBridge.json","true" -"SecurityScorecardFactor_CL","SecurityScorecard Cybersecurity 
Ratings","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings","SecurityScorecard","SecurityScorecard","2022-10-01","2022-10-01","","SecurityScorecard","Partner","https://support.securityscorecard.com/hc/en-us/requests/new","","domains","SecurityScorecardFactorAzureFunctions","SecurityScorecard","SecurityScorecard Factor","SecurityScorecard is the leader in cybersecurity risk ratings. The [SecurityScorecard](https://www.SecurityScorecard.com/) Factors data connector provides the ability for Sentinel to import SecurityScorecard factor ratings as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. Maintain full awareness of any company's security posture and be able to receive timely updates when factor scores change or drop. SecurityScorecard factor ratings are updated daily based on evidence collected across the web.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings/Data%20Connectors/SecurityScorecardFactor/SecurityScorecardFactor_API_FunctionApp.json","true" -"SecurityScorecardIssues_CL","SecurityScorecard Cybersecurity Ratings","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings","SecurityScorecard","SecurityScorecard","2022-10-01","2022-10-01","","SecurityScorecard","Partner","https://support.securityscorecard.com/hc/en-us/requests/new","","domains","SecurityScorecardIssueAzureFunctions","SecurityScorecard","SecurityScorecard Issue","SecurityScorecard is the leader in cybersecurity risk ratings. The [SecurityScorecard](https://www.SecurityScorecard.com/) Issues data connector provides the ability for Sentinel to import SecurityScorecard issue data as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. 
Maintain full awareness of any company's security posture and be able to receive timely updates when new cybersecurity issues are discovered.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings/Data%20Connectors/SecurityScorecardIssue/SecurityScorecardIssue_API_FunctionApp.json","true" -"SecurityScorecardRatings_CL","SecurityScorecard Cybersecurity Ratings","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings","SecurityScorecard","SecurityScorecard","2022-10-01","2022-10-01","","SecurityScorecard","Partner","https://support.securityscorecard.com/hc/en-us/requests/new","","domains","SecurityScorecardRatingsAzureFunctions","SecurityScorecard","SecurityScorecard Cybersecurity Ratings","SecurityScorecard is the leader in cybersecurity risk ratings. The [SecurityScorecard](https://www.SecurityScorecard.com/) data connector provides the ability for Sentinel to import SecurityScorecard ratings as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. Maintain full awareness of any company's security posture and be able to receive timely updates when scores change or drop. 
SecurityScorecard ratings are updated daily based on evidence collected across the web.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings/Data%20Connectors/SecurityScorecardRatings/SecurityScorecardRatings_API_FunctionApp.json","true" -"","SecurityThreatEssentialSolution","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityThreatEssentialSolution","azuresentinel","azure-sentinel-solution-securitythreatessentialsol","2022-03-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"SecurityEvent","Semperis Directory Services Protector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Semperis%20Directory%20Services%20Protector","semperis","directory-services-protector-solution","2021-10-18","","","Semperis","Partner","https://www.semperis.com/contact-us/","","domains","SemperisDSP","SEMPERIS","Semperis Directory Services Protector","Semperis Directory Services Protector data connector allows for the export of its Windows event logs (i.e. Indicators of Exposure and Indicators of Compromise) to Microsoft Sentinel in real time.
It provides a data parser to manipulate the Windows event logs more easily. The different workbooks ease your Active Directory security monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Semperis%20Directory%20Services%20Protector/Data%20Connectors/SemperisDSP-connector.json","true" -"SenservaPro_CL","SenservaPro","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SenservaPro","senservallc","senservapro4sentinel","2022-06-01","","","Senserva","Partner","https://www.senserva.com/contact/","","domains","SenservaPro","Senserva","SenservaPro (Preview)","The SenservaPro data connector provides a viewing experience for your SenservaPro scanning logs. View dashboards of your data, use queries to hunt & explore, and create custom alerts.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SenservaPro/Data%20Connectors/SenservaPro.json","true" -"SentinelOne_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOne","SentinelOne","SentinelOne","The [SentinelOne](https://www.sentinelone.com/) data connector provides the capability to ingest common SentinelOne server objects such as Threats, Agents, Applications, Activities, Policies, Groups, and more events into Microsoft Sentinel through the REST API. Refer to API documentation: `https://.sentinelone.net/api-doc/overview` for more information. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_API_FunctionApp.json","true" -"SentinelOneActivities_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" -"SentinelOneAgents_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" -"SentinelOneAlerts_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" -"SentinelOneGroups_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" -"SentinelOneThreats_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" -"","SentinelSOARessentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelSOARessentials","azuresentinel","azure-sentinel-solution-sentinelsoaressentials","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"SeraphicWebSecurity_CL","SeraphicSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SeraphicSecurity","seraphicalgorithmsltd1616061090462","seraphic-security-sentinel","2023-07-31","2023-07-31","","Seraphic Security","Partner","https://seraphicsecurity.com","","domains","SeraphicWebSecurity","Seraphic","Seraphic Web Security","The Seraphic Web Security data connector provides the capability to ingest [Seraphic Web Security](https://seraphicsecurity.com/) events and alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SeraphicSecurity/Data%20Connectors/SeraphicSecurityConnector.json","true" -"","ServiceNow TISC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ServiceNow%20TISC","servicenow1594831756316","sentinel-solution-tisc","2025-01-15","2025-01-15","","ServiceNow","Partner","https://support.servicenow.com/now","","domains","","","","","","false" -"","Servicenow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Servicenow","azuresentinel","azure-sentinel-solution-servicenow","2022-09-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" 
-"Sevco_Devices_CL","SevcoSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SevcoSecurity","azuresentinel","azure-sentinel-solution-sevcosecurity","2023-05-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SevcoDevices","Sevco Security","Sevco Platform - Devices","The Sevco Platform - Devices connector allows you to easily connect your Sevco Device Assets with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s assets and improves your security operation capabilities.

[For more information >​](https://docs.sev.co/docs/microsoft-sentinel-inventory)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SevcoSecurity/Data%20Connectors/Connector_SevcoSecurity.json","true" -"","ShadowByte Aria","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ShadowByte%20Aria","shadowbyte1641237427416","ariasent1","2021-12-24","","","Shadowbyte","Partner","https://shadowbyte.com/products/aria/","","domains","","","","","","false" -"","Shodan","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Shodan","azuresentinel","azure-sentinel-solution-shodan","2023-02-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"CommonSecurityLog","Silverfort","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Silverfort","silverfort","microsoft-sentinel-solution-silverfort","2024-09-01","","","Silverfort","Partner","https://www.silverfort.com/customer-success/#support","","domains","SilverfortAma","Silverfort","Silverfort Admin Console","The [Silverfort](https://silverfort.com) ITDR Admin Console connector solution allows ingestion of Silverfort events and logging into Microsoft Sentinel.
Silverfort provides syslog based events and logging using Common Event Format (CEF). By forwarding your Silverfort ITDR Admin Console CEF data into Microsoft Sentinel, you can take advantage of Sentinel's search & correlation, alerting, and threat intelligence enrichment on Silverfort data.
Please contact Silverfort or consult the Silverfort documentation for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Silverfort/Data%20Connectors/SilverfortAma.json","true" -"SlackAuditNativePoller_CL","SlackAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit","azuresentinel","azure-sentinel-solution-slackaudit","2021-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SlackAudit","Slack","Slack","The [Slack](https://slack.com) data connector provides the capability to ingest [Slack Audit Records](https://api.slack.com/admins/audit-logs) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs#the_audit_event) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. This data connector uses Microsoft Sentinel native polling capability.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit/Data%20Connectors/SlackNativePollerConnector/azuredeploy_Slack_native_poller_connector.json","true" -"SlackAudit_CL","SlackAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit","azuresentinel","azure-sentinel-solution-slackaudit","2021-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SlackAuditAPI","Slack","[DEPRECATED] Slack Audit","The [Slack](https://slack.com) Audit data connector provides the capability to ingest [Slack Audit Records](https://api.slack.com/admins/audit-logs) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs#the_audit_event) for more information. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit/Data%20Connectors/SlackAudit_API_FunctionApp.json","true" -"SlackAuditV2_CL","SlackAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit","azuresentinel","azure-sentinel-solution-slackaudit","2021-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SlackAuditLogsCCPDefinition","Microsoft","SlackAudit (via Codeless Connector Framework)","The SlackAudit data connector provides the capability to ingest [Slack Audit logs](https://api.slack.com/admins/audit-logs) into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs-call) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit/Data%20Connectors/SlackAuditLog_CCP/SlackAuditLog_ConnectorDefinition.json","true" -"","SlashNext","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlashNext","slashnext1598548183597","slashnext-weblog-assessment-for-microsoft-sentinel","2022-08-12","2022-08-12","","SlashNext","Partner","https://support@slashnext.com","","domains","","","","","","false" -"","SlashNext SIEM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlashNext%20SIEM","slashnext1598548183597","slashnext-security-events-for-microsoft-sentinel","2023-05-26","2023-06-16","","SlashNext","Partner","https://slashnext.com/support","","domains","","","","","","false" -"Snowflake_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeDataConnector","Snowflake","[DEPRECATED] Snowflake","The Snowflake data connector provides the capability to ingest Snowflake [login logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history.html) and [query 
logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history.html) into Microsoft Sentinel using the Snowflake Python Connector. Refer to [Snowflake documentation](https://docs.snowflake.com/en/user-guide/python-connector.html) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Snowflake_API_FunctionApp.json","true" -"SnowflakeLoad_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeLogin_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeMaterializedView_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeQuery_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeRoleGrant_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeRoles_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeTableStorageMetrics_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeTables_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeUserGrant_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeUsers_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"CommonSecurityLog","SonicWall Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall","sonicwall-inc","sonicwall-networksecurity-azure-sentinal","2022-05-06","","","SonicWall","Partner","https://www.sonicwall.com/support/","","domains","SonicWallFirewall","SonicWall","[Deprecated] SonicWall Firewall via Legacy Agent","Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by SonicWall to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall/Data%20Connectors/SonicwallFirewall.json","true" -"CommonSecurityLog","SonicWall Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall","sonicwall-inc","sonicwall-networksecurity-azure-sentinal","2022-05-06","","","SonicWall","Partner","https://www.sonicwall.com/support/","","domains","SonicWallFirewallAma","SonicWall","[Deprecated] SonicWall Firewall via AMA","Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by SonicWall to allow event interoperability among different platforms. 
By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall/Data%20Connectors/template_SonicwallFirewallAMA.json","true" -"Sonrai_Tickets_CL","SonraiSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonraiSecurity","sonraisecurityllc1584373214489","sonrai_sentinel_offer","2021-10-18","","","Sonrai","Partner","","","domains","SonraiDataConnector","Sonrai","Sonrai Data Connector","Use this data connector to integrate with Sonrai Security and get Sonrai tickets sent directly to Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonraiSecurity/Data%20Connectors/Connector_REST_API_Sonrai.json","true" -"SophosCloudOptix_CL","Sophos Cloud Optix","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Cloud%20Optix","sophos","sophos_cloud_optix_mss","2022-05-02","","","Sophos","Partner","https://www.sophos.com/en-us/support","","domains","SophosCloudOptix","Sophos","Sophos Cloud Optix","The [Sophos Cloud Optix](https://www.sophos.com/products/cloud-optix.aspx) connector allows you to easily connect your Sophos Cloud Optix logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's cloud security and compliance posture and improves your cloud security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Cloud%20Optix/Data%20Connectors/Connector_REST_API_SophosCloudOptix.json","true" -"SophosEP_CL","Sophos Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-sophosep","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosEP","Sophos","Sophos Endpoint Protection","The [Sophos Endpoint Protection](https://www.sophos.com/en-us/products/endpoint-antivirus.aspx) data connector provides the capability to ingest [Sophos events](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/common/concepts/Events.html) into Microsoft Sentinel. Refer to [Sophos Central Admin documentation](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/Logs.html) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection/Data%20Connectors/SophosEP_API_FunctionApp.json","true" -"SophosEPAlerts_CL","Sophos Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-sophosep","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosEndpointProtectionCCPDefinition","Microsoft","Sophos Endpoint Protection (using REST API)","The [Sophos Endpoint Protection](https://www.sophos.com/en-us/products/endpoint-antivirus.aspx) data connector provides the capability to ingest [Sophos events](https://developer.sophos.com/docs/siem-v1/1/routes/events/get) and [Sophos alerts](https://developer.sophos.com/docs/siem-v1/1/routes/alerts/get) into Microsoft Sentinel. 
Refer to [Sophos Central Admin documentation](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/Logs.html) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection/Data%20Connectors/SophosEP_ccp/SophosEP_DataConnectorDefinition.json","true" -"SophosEPEvents_CL","Sophos Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-sophosep","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosEndpointProtectionCCPDefinition","Microsoft","Sophos Endpoint Protection (using REST API)","The [Sophos Endpoint Protection](https://www.sophos.com/en-us/products/endpoint-antivirus.aspx) data connector provides the capability to ingest [Sophos events](https://developer.sophos.com/docs/siem-v1/1/routes/events/get) and [Sophos alerts](https://developer.sophos.com/docs/siem-v1/1/routes/alerts/get) into Microsoft Sentinel. Refer to [Sophos Central Admin documentation](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/Logs.html) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection/Data%20Connectors/SophosEP_ccp/SophosEP_DataConnectorDefinition.json","true" -"Syslog","Sophos XG Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20XG%20Firewall","azuresentinel","azure-sentinel-solution-sophosxgfirewall","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosXGFirewall","Sophos","[Deprecated] Sophos XG Firewall","The [Sophos XG Firewall](https://www.sophos.com/products/next-gen-firewall.aspx) allows you to easily connect your Sophos XG Firewall logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. 
Integrating Sophos XG Firewall with Microsoft Sentinel provides more visibility into your organization's firewall traffic and will enhance security monitoring capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20XG%20Firewall/Data%20Connectors/Connector_Syslog_SophosXGFirewall.json","true" -"","SpyCloud Enterprise Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SpyCloud%20Enterprise%20Protection","spycloudinc1680448518850","azure-sentinel-solution-spycloudenterprise","2023-09-09","","","Spycloud","Partner","https://portal.spycloud.com","","domains","","","","","","false" -"secRMM_CL","Squadra Technologies SecRmm","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Squadra%20Technologies%20SecRmm","squadratechnologies","squadra_technologies_secrmm_mss","2022-05-09","","","Squadra Technologies","Partner","https://www.squadratechnologies.com/Contact.aspx","","domains","SquadraTechnologiesSecRMM","Squadra Technologies","Squadra Technologies secRMM","Use the Squadra Technologies secRMM Data Connector to push USB removable storage security event data into Microsoft Sentinel Log Analytics.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Squadra%20Technologies%20SecRmm/Data%20Connectors/SquadraTechnologiesSecRMM.json","true" -"SquidProxy_CL","SquidProxy","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SquidProxy","azuresentinel","azure-sentinel-solution-squidproxy","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SquidProxy","Squid","[Deprecated] Squid Proxy","The [Squid Proxy](http://www.squid-cache.org/) connector allows you to easily connect your Squid Proxy logs with Microsoft Sentinel. 
This gives you more insight into your organization's network proxy traffic and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SquidProxy/Data%20Connectors/Connector_CustomLog_SquidProxy.json","true" -"StyxViewAlerts_CL","Styx Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Styx%20Intelligence","styx_intelligence","microsoft-sentinel-solution-styxintelligence","2025-02-07","","","Styx Intelligence","Partner","https://www.styxintel.com/contact-us/","","domains","StyxViewEndpointConnectorDefinition","Styx Intelligence","StyxView Alerts (via Codeless Connector Platform)","The [StyxView Alerts](https://styxintel.com/) data connector enables seamless integration between the StyxView Alerts platform and Microsoft Sentinel. This connector ingests alert data from the StyxView Alerts API, allowing organizations to centralize and correlate actionable threat intelligence directly within their Microsoft Sentinel workspace.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Styx%20Intelligence/Data%20Connectors/Alerts/StyxView%20Alerts_ConnectorDefinition.json","true" -"Syslog","Symantec Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-symantecendpointprotection","2022-07-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SymantecEndpointProtection","Broadcom","[Deprecated] Symantec Endpoint Protection","The [Broadcom Symantec Endpoint Protection (SEP)](https://www.broadcom.com/products/cyber-security/endpoint/end-user/enterprise) connector allows you to easily connect your SEP logs with Microsoft Sentinel. 
This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Endpoint%20Protection/Data%20Connectors/Connector_Syslog_SymantecEndpointProtection.json","true" -"SymantecICDx_CL","Symantec Integrated Cyber Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Integrated%20Cyber%20Defense","azuresentinel","symantec_icdx_mss","2022-06-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Symantec","Symantec","Symantec Integrated Cyber Defense Exchange","Symantec ICDx connector allows you to easily connect your Symantec security solutions logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Integrated%20Cyber%20Defense/Data%20Connectors/SymantecICDX.JSON","true" -"Syslog","Symantec VIP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20VIP","azuresentinel","azure-sentinel-solution-symantecvip","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SymantecVIP","Symantec","[Deprecated] Symantec VIP","The [Symantec VIP](https://vip.symantec.com/) connector allows you to easily connect your Symantec VIP logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's network and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20VIP/Data%20Connectors/Connector_Syslog_SymantecVIP.json","true" -"Syslog","SymantecProxySG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SymantecProxySG","azuresentinel","azure-sentinel-symantec-proxysg","2021-05-25","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SymantecProxySG","Symantec","[Deprecated] Symantec ProxySG","The [Symantec ProxySG](https://www.broadcom.com/products/cyber-security/network/gateway/proxy-sg-and-advanced-secure-gateway) allows you to easily connect your Symantec ProxySG logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Symantec ProxySG with Microsoft Sentinel provides more visibility into your organization's network proxy traffic and will enhance security monitoring capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SymantecProxySG/Data%20Connectors/Connector_Syslog_SymantecProxySG.json","true" -"","Synack","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Synack","","","","","","","","","","","","","","","","false" -"Syslog","Syslog","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog","azuresentinel","azure-sentinel-solution-syslog","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Syslog","Microsoft","Syslog via Legacy Agent","Syslog is an event logging protocol that is common to Linux. Applications will send messages that may be stored on the local machine or delivered to a Syslog collector. When the Agent for Linux is installed, it configures the local Syslog daemon to forward messages to the agent. The agent then sends the message to the workspace.

[Learn more >](https://aka.ms/sysLogInfo)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog/Data%20Connectors/template_Syslog.json","true" -"Syslog","Syslog","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog","azuresentinel","azure-sentinel-solution-syslog","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SyslogAma","Microsoft","Syslog via AMA","Syslog is an event logging protocol that is common to Linux. Applications will send messages that may be stored on the local machine or delivered to a Syslog collector. When the Agent for Linux is installed, it configures the local Syslog daemon to forward messages to the agent. The agent then sends the message to the workspace.

[Learn more >](https://aka.ms/sysLogInfo)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog/Data%20Connectors/template_SyslogAma.json","true" -"Talon_CL","Talon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Talon","taloncybersecurityltd1654088115170","talonconnector","2023-01-25","","","Talon Security","Partner","https://docs.console.talon-sec.com/","","domains","TalonLogs","Talon Security","Talon Insights","The Talon Security Logs connector allows you to easily connect your Talon events and audit logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Talon/Data%20Connectors/TalonLogs.json","true" -"","Tanium","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tanium","taniuminc1646329360287","tanium_sentinel_connector","2022-05-16","2025-07-03","","Tanium Inc.","Partner","https://support.tanium.com","","domains","","","","","","false" -"Cymru_Scout_Account_Usage_Data_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_Domain_Data_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout 
Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Communications_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Details_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Fingerprints_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team 
Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Foundation_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_OpenPorts_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_PDNS_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team 
Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Summary_Certs_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Summary_Details_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Summary_Fingerprints_CL","Team Cymru 
Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Summary_OpenPorts_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Summary_PDNS_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" 
-"Cymru_Scout_IP_Data_x509_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"","Teams","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Teams","sentinel4teams","sentinelforteams","2022-02-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"","Templates","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Templates","","","","","","","","","","","","","","","","false" -"Tenable_IE_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableIE","Tenable","Tenable Identity Exposure","Tenable Identity Exposure connector allows Indicators of Exposure, Indicators of Attack and trailflow logs to be ingested into Microsoft Sentinel.The different work books and data parsers allow you to more easily manipulate logs and monitor your Active Directory environment. 
The analytic templates allow you to automate responses regarding different events, exposures and attacks.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableIE/TenableIE.json","true" -"Tenable_VM_Asset_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" -"Tenable_VM_Compliance_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. 
The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" -"Tenable_VM_Vuln_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" -"Tenable_WAS_Asset_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. 
The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" -"Tenable_WAS_Vuln_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" -"Tenable_ad_CL","TenableAD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableAD","","","","","","","","","","","Tenable.ad","Tenable","Tenable.ad","Tenable.ad connector allows to export Tenable.ad Indicators of Exposures, trailflow and Indicators of Attacks logs to Azure Sentinel in real time.
It provides a data parser to manipulate the logs more easily. The different workbooks ease your Active Directory monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableAD/Data%20Connectors/Tenable.ad.json","true" -"Tenable_IO_Assets_CL","TenableIO","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO","tenable","tenable-sentinel-integration","2022-06-01","","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableIOAPI","Tenable","Tenable.io Vulnerability Management","The [Tenable.io](https://www.tenable.com/products/tenable-io) data connector provides the capability to ingest Asset and Vulnerability data into Microsoft Sentinel through the REST API from the Tenable.io platform (Managed in the cloud). Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO/Data%20Connectors/TenableIO.json","true" -"Tenable_IO_Vuln_CL","TenableIO","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO","tenable","tenable-sentinel-integration","2022-06-01","","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableIOAPI","Tenable","Tenable.io Vulnerability Management","The [Tenable.io](https://www.tenable.com/products/tenable-io) data connector provides the capability to ingest Asset and Vulnerability data into Microsoft Sentinel through the REST API from the Tenable.io platform (Managed in the cloud). Refer to [API documentation](https://developer.tenable.com/reference) for more information. 
The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO/Data%20Connectors/TenableIO.json","true" -"","TestSolution","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TestSolution","","","","","","","","","","domains","","","","","","false" -"TheHive_CL","TheHive","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TheHive","azuresentinel","azure-sentinel-solution-thehive","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TheHiveProjectTheHive","TheHive Project","TheHive Project - TheHive","The [TheHive](http://thehive-project.org/) data connector provides the capability to ingest common TheHive events into Microsoft Sentinel through Webhooks. TheHive can notify external system of modification events (case creation, alert update, task assignment) in real time. When a change occurs in the TheHive, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://docs.thehive-project.org/thehive/legacy/thehive3/admin/webhooks/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TheHive/Data%20Connectors/TheHive_Webhooks_FunctionApp.json","true" -"TheomAlerts_CL","Theom","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Theom","theominc1667512729960","theom_sentinel","2022-11-04","","","Theom","Partner","https://www.theom.ai","","domains","Theom","Theom","Theom","Theom Data Connector enables organizations to connect their Theom environment to Microsoft Sentinel. 
This solution enables users to receive alerts on data security risks, create and enrich incidents, check statistics and trigger SOAR playbooks in Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Theom/Data%20Connectors/Theom.json","true" -"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","MicrosoftDefenderThreatIntelligence","Microsoft","Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_MicrosoftDefenderThreatIntelligence.json","true" -"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PremiumMicrosoftDefenderForThreatIntelligence","Microsoft","Premium Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc. 
Note: This is a paid connector. To use and ingest data from it, please purchase the ""MDTI API Access"" SKU from the Partner Center.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_PremiumMicrosoftDefenderThreatIntelligence.json","true" -"CommonSecurityLog","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligence.json","true" -"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. 
Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligence.json","true" -"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxii","Microsoft","Threat intelligence - TAXII","Microsoft Sentinel integrates with TAXII 2.0 and 2.1 data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send the supported STIX object types from TAXII servers to Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2224105&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceTaxii.json","true" -"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceUploadIndicators.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json","false" -"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","MicrosoftDefenderThreatIntelligence","Microsoft","Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_MicrosoftDefenderThreatIntelligence.json","true" -"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","MicrosoftDefenderThreatIntelligence","Microsoft","Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. 
Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_MicrosoftDefenderThreatIntelligence.json","true" -"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PremiumMicrosoftDefenderForThreatIntelligence","Microsoft","Premium Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc. Note: This is a paid connector. 
To use and ingest data from it, please purchase the ""MDTI API Access"" SKU from the Partner Center.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_PremiumMicrosoftDefenderThreatIntelligence.json","true" -"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PremiumMicrosoftDefenderForThreatIntelligence","Microsoft","Premium Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc. Note: This is a paid connector. To use and ingest data from it, please purchase the ""MDTI API Access"" SKU from the Partner Center.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_PremiumMicrosoftDefenderThreatIntelligence.json","true" -"CommonSecurityLog","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. 
Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligence.json","true" -"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligence.json","true" -"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligence.json","true" -"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxii","Microsoft","Threat intelligence - TAXII","Microsoft Sentinel integrates with TAXII 2.0 and 2.1 data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send the supported STIX object types from TAXII servers to Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes. For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2224105&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceTaxii.json","true" -"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxii","Microsoft","Threat intelligence - TAXII","Microsoft Sentinel integrates with TAXII 2.0 and 2.1 data sources to enable monitoring, alerting, and hunting using your threat intelligence. 
Use this connector to send the supported STIX object types from TAXII servers to Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes. For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2224105&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceTaxii.json","true" -"ThreatIntelExportOperation","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxiiExport","Microsoft","Threat intelligence - TAXII Export (Preview)","Microsoft Sentinel integrates with TAXII 2.1 servers to enable exporting of your threat intelligence objects. Use this connector to send the supported STIX object types from Microsoft Sentinel to TAXII servers.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceTaxiiExport.json","true" -"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. 
Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json","false" -"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json","false" -"","Threat Intelligence Solution for Azure Government","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20Solution%20for%20Azure%20Government","azuresentinel","azure-sentinel-solution-threatintelligenceazuregov","2023-03-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"","ThreatAnalysis&Response","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ThreatAnalysis%26Response","azuresentinel","azure-sentinel-solution-mitreattck","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"","ThreatConnect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ThreatConnect","threatconnectinc1694630408738","sentinel-threatconnect-byol-enterprise","2023-09-11","2023-09-11","","ThreatConnect, Inc.","Partner","https://threatconnect.com/contact/","","domains","","","","","","false" -"","ThreatXCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ThreatXCloud","azuresentinel","azure-sentinel-solution-threatxwaf","2022-09-23","2022-09-23","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"Tomcat_CL","Tomcat","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tomcat","azuresentinel","azure-sentinel-solution-apachetomcat","2022-01-31","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","ApacheTomcat","Apache","[Deprecated] Apache Tomcat","The Apache Tomcat solution provides the capability to ingest [Apache Tomcat](http://tomcat.apache.org/) events into Microsoft Sentinel. Refer to [Apache Tomcat documentation](http://tomcat.apache.org/tomcat-10.0-doc/logging.html) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tomcat/Data%20Connectors/Connector_Tomcat_agent.json","true" -"","Torq","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Torq","torqtechnologiesltd2020","torq_sentinel_solution","2024-12-24","","","Torq Support Team","Partner","https://support.torq.io","","domains","","","","","","false" -"","Training","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Training","","","","","","","","","","","","","","","","false" -"TransmitSecurityActivity_CL","TransmitSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TransmitSecurity","transmitsecurity","microsoft-sentinel-solution-transmitsecurity","2024-06-10","2024-11-20","","Transmit Security","Partner","https://transmitsecurity.com/support","","domains","TransmitSecurity","TransmitSecurity","Transmit Security Connector","The [Transmit Security] data connector provides the capability to ingest common Transmit Security API events into Microsoft Sentinel through the REST API. [Refer to API documentation for more information](https://developer.transmitsecurity.com/). 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TransmitSecurity/Data%20Connectors/TransmitSecurity_API_FunctionApp.JSON","true" -"CommonSecurityLog","Trend Micro Apex One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One","azuresentinel","azure-sentinel-solution-trendmicroapexone","2021-07-06","2022-03-24","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TrendMicroApexOne","Trend Micro","[Deprecated] Trend Micro Apex One via Legacy Agent","The [Trend Micro Apex One](https://www.trendmicro.com/en_us/business/products/user-protection/sps/endpoint.html) data connector provides the capability to ingest [Trend Micro Apex One events](https://aka.ms/sentinel-TrendMicroApex-OneEvents) into Microsoft Sentinel. Refer to [Trend Micro Apex Central](https://aka.ms/sentinel-TrendMicroApex-OneCentral) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One/Data%20Connectors/TrendMicro_ApexOne.json","true" -"CommonSecurityLog","Trend Micro Apex One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One","azuresentinel","azure-sentinel-solution-trendmicroapexone","2021-07-06","2022-03-24","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TrendMicroApexOneAma","Trend Micro","[Deprecated] Trend Micro Apex One via AMA","The [Trend Micro Apex One](https://www.trendmicro.com/en_us/business/products/user-protection/sps/endpoint.html) data connector provides the capability to ingest [Trend Micro Apex One events](https://aka.ms/sentinel-TrendMicroApex-OneEvents) into Microsoft Sentinel. 
Refer to [Trend Micro Apex Central](https://aka.ms/sentinel-TrendMicroApex-OneCentral) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One/Data%20Connectors/template_TrendMicro_ApexOneAMA.json","true" -"TrendMicroCAS_CL","Trend Micro Cloud App Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Cloud%20App%20Security","azuresentinel","azuresentinel.trendmicrocas","2021-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TrendMicroCAS","Trend Micro","Trend Micro Cloud App Security","The [Trend Micro Cloud App Security](https://www.trendmicro.com/en_be/business/products/user-protection/sps/email-and-collaboration/cloud-app-security.html) data connector provides the capability to retrieve security event logs of the services that Cloud App Security protects and more events into Microsoft Sentinel through the Log Retrieval API. Refer to API [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/supported-cloud-app-/log-retrieval-api/get-security-logs.aspx) for more information. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Cloud%20App%20Security/Data%20Connectors/TerndMicroCAS_API_FunctionApp.json","true" -"CommonSecurityLog","Trend Micro Deep Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Deep%20Security","trendmicro","trend_micro_deep_security_mss","2022-05-10","","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicro","Trend Micro","[Deprecated] Trend Micro Deep Security via Legacy","The Trend Micro Deep Security connector allows you to easily connect your Deep Security logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Deep%20Security/Data%20Connectors/TrendMicroDeepSecurity.json","true" -"CommonSecurityLog","Trend Micro TippingPoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20TippingPoint","trendmicro","trend_micro_tippingpoint_mss","2022-05-02","","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/contactus?language=en_US","","domains","TrendMicroTippingPoint","Trend Micro","[Deprecated] Trend Micro TippingPoint via Legacy","The Trend Micro TippingPoint connector allows you to easily connect your TippingPoint SMS IPS events with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's networks/systems and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20TippingPoint/Data%20Connectors/TrendMicroTippingPoint.json","true" -"TrendMicro_XDR_OAT_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" -"TrendMicro_XDR_RCA_Result_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" -"TrendMicro_XDR_RCA_Task_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" -"TrendMicro_XDR_WORKBENCH_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" -"","UEBA Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/UEBA%20Essentials","azuresentinel","azure-sentinel-solution-uebaessentials","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"","URLhaus","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/URLhaus","azuresentinel","azure-sentinel-solution-urlhaus","2022-09-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"Ubiquiti_CL","Ubiquiti UniFi","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ubiquiti%20UniFi","azuresentinel","azure-sentinel-solution-ubiquitiunifi","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","UbiquitiUnifi","Ubiquiti","[Deprecated] Ubiquiti UniFi","The [Ubiquiti UniFi](https://www.ui.com/) data connector provides the capability to ingest [Ubiquiti UniFi firewall, dns, ssh, AP events](https://help.ui.com/hc/en-us/articles/204959834-UniFi-How-to-View-Log-Files) into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ubiquiti%20UniFi/Data%20Connectors/Connector_Ubiquiti_agent.json","true" 
-"ThreatIntelligenceIndicator","VMRay","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMRay","vmraygmbh1623334327435","microsoft-sentinel-solution-vmray","2025-07-23","","","VMRay","Partner","https://www.vmray.com/contact/customer-support/","","domains","VMRay","VMRay","VMRayThreatIntelligence","VMRayThreatIntelligence connector automatically generates and feeds threat intelligence for all submissions to VMRay, improving threat detection and incident response in Sentinel. This seamless integration empowers teams to proactively address emerging threats.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMRay/Data%20Connectors/VMRayThreatIntelligence_FunctionApp.json","true" -"Syslog","VMWareESXi","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMWareESXi","azuresentinel","azure-sentinel-solution-vmwareesxi","2022-01-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","VMwareESXi","VMWare","[Deprecated] VMware ESXi","The [VMware ESXi](https://www.vmware.com/products/esxi-and-esx.html) connector allows you to easily connect your VMWare ESXi logs with Microsoft Sentinel This gives you more insight into your organization's ESXi servers and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMWareESXi/Data%20Connectors/Connector_Syslog_VMwareESXi.json","true" -"CarbonBlackAuditLogs_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","VMwareCarbonBlack","VMware","VMware Carbon Black Cloud","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) connector provides the capability to ingest Carbon Black data into Microsoft Sentinel. 
The connector provides visibility into Audit, Notification and Event logs in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlack_API_FunctionApp.json","true" -"CarbonBlackEvents_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","VMwareCarbonBlack","VMware","VMware Carbon Black Cloud","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) connector provides the capability to ingest Carbon Black data into Microsoft Sentinel. The connector provides visibility into Audit, Notification and Event logs in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlack_API_FunctionApp.json","true" -"CarbonBlackNotifications_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","VMwareCarbonBlack","VMware","VMware Carbon Black Cloud","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) connector provides the capability to ingest Carbon Black data into Microsoft Sentinel. 
The connector provides visibility into Audit, Notification and Event logs in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlack_API_FunctionApp.json","true" -"ASimAuthenticationEventLogs","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" -"ASimFileEventLogs","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the 
capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" -"ASimNetworkSessionLogs","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" -"ASimProcessEventLogs","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" -"ASimRegistryEventLogs","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" -"CarbonBlack_Alerts_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" -"CarbonBlack_Watchlist_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" -"VMware_CWS_DLPLogs_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. 
In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" -"VMware_CWS_Health_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. 
In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" -"VMware_CWS_Weblogs_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. 
In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" -"VMware_VECO_EventLogs_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. 
In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" -"vcenter_CL","VMware vCenter","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20vCenter","azuresentinel","azure-sentinel-solution-vcenter","2022-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","VMwarevCenter","VMware","[Deprecated] VMware vCenter","The [vCenter](https://www.vmware.com/in/products/vcenter-server.html) connector allows you to easily connect your vCenter server logs with Microsoft Sentinel. This gives you more insight into your organization's data centers and improves your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20vCenter/Data%20Connectors/Connector_Syslog_vcenter.json","true" -"ValenceAlert_CL","Valence Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Valence%20Security","valencesecurityinc1673598943514","valence_sentinel_solution","2023-11-20","","","Valence Security","Partner","https://www.valencesecurity.com/","","domains","ValenceSecurity","Valence Security","SaaS Security","Connects the Valence SaaS security platform Azure Log Analytics via the REST API interface.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Valence%20Security/Data%20Connectors/ValenceSecurity.json","true" -"varonisresources_CL","Varonis Purview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Varonis%20Purview","varonis","azure-sentinel-solution-varonispurview","2025-10-27","2025-10-01","","Varonis","Partner","https://www.varonis.com/resources/support","","domains","VaronisPurviewPush","Varonis","Varonis Purview Push Connector","The [Varonis Purview](https://www.varonis.com/) connector provides the 
capability to sync resources from Varonis to Microsoft Purview.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Varonis%20Purview/Data%20Connectors/VaronisPurview_ccp/VaronisPurview_connectorDefinition.json","true" -"VaronisAlerts_CL","VaronisSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VaronisSaaS","varonis","microsoft-sentinel-solution-varonissaas","2023-11-10","2023-11-10","","Varonis","Partner","https://www.varonis.com/resources/support","","domains","VaronisSaaS","Varonis","Varonis SaaS","Varonis SaaS provides the capability to ingest [Varonis Alerts](https://www.varonis.com/products/datalert) into Microsoft Sentinel.

Varonis prioritizes deep data visibility, classification capabilities, and automated remediation for data access. Varonis builds a single prioritized view of risk for your data, so you can proactively and systematically eliminate risk from insider threats and cyberattacks.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VaronisSaaS/Data%20Connectors/VaronisSaaS_API_FunctionApp.json","true" -"CommonSecurityLog","Vectra AI Detect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect","vectraaiinc","ai_vectra_detect_mss","2022-05-24","2023-04-17","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraDetect","Vectra AI","[Deprecated] Vectra AI Detect via Legacy Agent","The AI Vectra Detect connector allows users to connect Vectra Detect logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives users more insight into their organization's network and improves their security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect/Data%20Connectors/AIVectraDetect.json","true" -"CommonSecurityLog","Vectra AI Detect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect","vectraaiinc","ai_vectra_detect_mss","2022-05-24","2023-04-17","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraDetectAma","Vectra AI","[Deprecated] Vectra AI Detect via AMA","The AI Vectra Detect connector allows users to connect Vectra Detect logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives users more insight into their organization's network and improves their security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect/Data%20Connectors/template_AIVectraDetectAma.json","true" -"VectraStream","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraStream","Vectra AI","AI Vectra Stream via Legacy Agent","The AI Vectra Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/Connector_VectraAI_Stream.json","true" -"VectraStream_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraStream","Vectra AI","AI Vectra Stream via Legacy Agent","The AI Vectra Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/Connector_VectraAI_Stream.json","true" -"vectra_beacon_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft 
Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_dcerpc_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_dhcp_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_dns_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" 
-"vectra_http_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_isession_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_kerberos_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_ldap_CL","Vectra AI 
Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_ntlm_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_radius_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_rdp_CL","Vectra AI 
Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_smbfiles_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_smbmapping_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_smtp_CL","Vectra AI 
Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_ssh_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_ssl_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_x509_CL","Vectra AI 
Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"Audits_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" -"Detections_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. 
Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" -"Entities_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" -"Entity_Scoring_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. 
Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" -"Health_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" -"Lockdown_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. 
Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" -"VeeamAuthorizationEvents_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" -"VeeamCovewareFindings_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" -"VeeamMalwareEvents_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" -"VeeamOneTriggeredAlarms_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" -"VeeamSecurityComplianceAnalyzer_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" -"VeeamSessions_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" -"","Veritas NetBackup","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veritas%20NetBackup","veritas","veritas-sentinel","2023-09-25","","","Veritas Technologies LLC","Partner","https://www.veritas.com/content/support/en_US/contact-us","","domains","","","","","","false" -"CommonSecurityLog","VirtualMetric DataStream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream","virtualmetric","azure-sentinel-solution-virtualmetric-datastream","2025-09-15","","","VirtualMetric","Partner","https://support.virtualmetric.com","VirtualMetric","domains","VirtualMetricDirectorProxy","VirtualMetric","VirtualMetric Director Proxy","VirtualMetric Director Proxy deploys an Azure Function App to securely bridge VirtualMetric DataStream with Azure services including Microsoft Sentinel, Azure Data Explorer, and Azure Storage.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream/Data%20Connectors/VirtualMetric-DirectorProxy/Template_DirectorProxy.json","true" -"CommonSecurityLog","VirtualMetric DataStream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream","virtualmetric","azure-sentinel-solution-virtualmetric-datastream","2025-09-15","","","VirtualMetric","Partner","https://support.virtualmetric.com","VirtualMetric","domains","VirtualMetricMSSentinelConnector","VirtualMetric","VirtualMetric DataStream for Microsoft Sentinel","VirtualMetric DataStream connector deploys Data Collection Rules to ingest security telemetry into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream/Data%20Connectors/VirtualMetric-Sentinel/Template_Sentinel.json","true" -"CommonSecurityLog","VirtualMetric 
DataStream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream","virtualmetric","azure-sentinel-solution-virtualmetric-datastream","2025-09-15","","","VirtualMetric","Partner","https://support.virtualmetric.com","VirtualMetric","domains","VirtualMetricMSSentinelDataLakeConnector","VirtualMetric","VirtualMetric DataStream for Microsoft Sentinel data lake","VirtualMetric DataStream connector deploys Data Collection Rules to ingest security telemetry into Microsoft Sentinel data lake.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream/Data%20Connectors/VirtualMetric-SentinelDataLake/Template_SentinelDataLake.json","true" -"","VirusTotal","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirusTotal","azuresentinel","azure-sentinel-solution-virustotal","2022-07-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"CommonSecurityLog","Votiro","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Votiro","votirocybersecltd1670174946024","votiro_data_connector","","","","Votiro","Partner","https://support.votiro.com/","","domains","Votiro","Votiro","[Deprecated] Votiro Sanitization Engine Logs","The Votiro data connector allows you to easily connect your Votiro Event logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
Using Votiro on Microsoft Sentinel will provide you more insights into the sanitization results of files.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Votiro/Data%20Connectors/VotiroEvents.json","true" -"Syslog","Watchguard Firebox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchguard%20Firebox","watchguard-technologies","watchguard_firebox_mss","2022-05-06","","","WatchGuard","Partner","https://www.watchguard.com/wgrd-support/contact-support","","domains","WatchguardFirebox","WatchGuard Technologies","[Deprecated] WatchGuard Firebox","WatchGuard Firebox (https://www.watchguard.com/wgrd-products/firewall-appliances and https://www.watchguard.com/wgrd-products/cloud-and-virtual-firewalls) is security products/firewall-appliances. Watchguard Firebox will send syslog to Watchguard Firebox collector agent.The agent then sends the message to the workspace.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchguard%20Firebox/Data%20Connectors/Connector_syslog_WatchGuardFirebox.json","true" -"","Watchlists Utilities","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchlists%20Utilities","azuresentinel","azure-sentinel-solution-watchlistsutilities","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"","Web Session Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Web%20Session%20Essentials","azuresentinel","azure-sentinel-solution-websession-domain","2023-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"","Web Shells Threat Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Web%20Shells%20Threat%20Protection","azuresentinel","azure-sentinel-solution-webshellsthreatprotection","2022-05-22","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" 
-"WindowsFirewall","Windows Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Firewall","azuresentinel","azure-sentinel-solution-windowsfirewall","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","WindowsFirewall","Microsoft","Windows Firewall","Windows Firewall is a Microsoft Windows application that filters information coming to your system from the Internet and blocking potentially harmful programs. The software blocks most programs from communicating through the firewall. Users simply add a program to the list of allowed programs to allow it to communicate through the firewall. When using a public network, Windows Firewall can also secure the system by blocking all unsolicited attempts to connect to your computer. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219791&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Firewall/Data%20Connectors/Windows%20Firewall.JSON","true" -"ASimNetworkSessionLogs","Windows Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Firewall","azuresentinel","azure-sentinel-solution-windowsfirewall","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","WindowsFirewallAma","Microsoft","Windows Firewall Events via AMA","Windows Firewall is a Microsoft Windows application that filters information coming to your system from the internet and blocking potentially harmful programs. The firewall software blocks most programs from communicating through the firewall. To stream your Windows Firewall application logs collected from your machines, use the Azure Monitor agent (AMA) to stream those logs to the Microsoft Sentinel workspace.

A configured data collection endpoint (DCE) is required to be linked with the data collection rule (DCR) created for the AMA to collect logs. For this connector, a DCE is automatically created in the same region as the workspace. If you already use a DCE stored in the same region, it's possible to change the default created DCE and use your existing one through the API. DCEs can be located in your resources with **SentinelDCE** prefix in the resource name.

For more information, see the following articles:
- [Data collection endpoints in Azure Monitor](https://learn.microsoft.com/azure/azure-monitor/essentials/data-collection-endpoint-overview?tabs=portal)
- [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2228623&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Firewall/Data%20Connectors/template_WindowsFirewallAma.JSON","true" -"WindowsEvent","Windows Forwarded Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Forwarded%20Events","azuresentinel","azure-sentinel-solution-windowsforwardedevents","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","WindowsForwardedEvents","Microsoft","Windows Forwarded Events","You can stream all Windows Event Forwarding (WEF) logs from the Windows Servers connected to your Microsoft Sentinel workspace using Azure Monitor Agent (AMA).
This connection enables you to view dashboards, create custom alerts, and improve investigation.
This gives you more insight into your organization’s network and improves your security operation capabilities. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219963&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Forwarded%20Events/Data%20Connectors/WindowsForwardedEvents.JSON","true" -"SecurityEvent","Windows Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Security%20Events","azuresentinel","azure-sentinel-solution-securityevents","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SecurityEvents","Microsoft","Security Events via Legacy Agent","You can stream all security events from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220093&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Security%20Events/Data%20Connectors/template_SecurityEvents.JSON","true" -"SecurityEvent","Windows Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Security%20Events","azuresentinel","azure-sentinel-solution-securityevents","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","WindowsSecurityEvents","Microsoft","Windows Security Events via AMA","You can stream all security events from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. 
This connection enables you to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220225&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Security%20Events/Data%20Connectors/template_WindowsSecurityEvents.JSON","true" -"ASimDnsActivityLogs","Windows Server DNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Server%20DNS","azuresentinel","azure-sentinel-solution-dns","2022-05-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ASimDnsActivityLogs","Microsoft","Windows DNS Events via AMA","The Windows DNS log connector allows you to easily filter and stream all analytics logs from your Windows DNS servers to your Microsoft Sentinel workspace using the Azure Monitoring agent (AMA). Having this data in Microsoft Sentinel helps you identify issues and security threats such as:
- Trying to resolve malicious domain names.
- Stale resource records.
- Frequently queried domain names and talkative DNS clients.
- Attacks performed on DNS server.

You can get the following insights into your Windows DNS servers from Microsoft Sentinel:
- All logs centralized in a single place.
- Request load on DNS servers.
- Dynamic DNS registration failures.

Windows DNS events are supported by Advanced SIEM Information Model (ASIM) and stream data into the ASimDnsActivityLogs table. [Learn more](https://docs.microsoft.com/azure/sentinel/normalization).

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2225993&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Server%20DNS/Data%20Connectors/template_ASimDnsActivityLogs.JSON","true" -"DnsEvents","Windows Server DNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Server%20DNS","azuresentinel","azure-sentinel-solution-dns","2022-05-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","DNS","Microsoft","DNS","The DNS log connector allows you to easily connect your DNS analytic and audit logs with Microsoft Sentinel, and other related data, to improve investigation.

**When you enable DNS log collection you can:**
- Identify clients that try to resolve malicious domain names.
- Identify stale resource records.
- Identify frequently queried domain names and talkative DNS clients.
- View request load on DNS servers.
- View dynamic DNS registration failures.

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220127&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Server%20DNS/Data%20Connectors/template_DNS.JSON","true" -"DnsInventory","Windows Server DNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Server%20DNS","azuresentinel","azure-sentinel-solution-dns","2022-05-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","DNS","Microsoft","DNS","The DNS log connector allows you to easily connect your DNS analytic and audit logs with Microsoft Sentinel, and other related data, to improve investigation.

**When you enable DNS log collection you can:**
- Identify clients that try to resolve malicious domain names.
- Identify stale resource records.
- Identify frequently queried domain names and talkative DNS clients.
- View request load on DNS servers.
- View dynamic DNS registration failures.

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220127&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Server%20DNS/Data%20Connectors/template_DNS.JSON","true" -"CommonSecurityLog","WireX Network Forensics Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform","wirexsystems1584682625009","wirex_network_forensics_platform_mss","2022-05-06","","","WireX Systems","Partner","https://wirexsystems.com/contact-us/","","domains","WireX_Systems_NFP","WireX_Systems","[Deprecated] WireX Network Forensics Platform via Legacy Agent","The WireX Systems data connector allows security professional to integrate with Microsoft Sentinel to allow you to further enrich your forensics investigations; to not only encompass the contextual content offered by WireX but to analyze data from other sources, and to create custom dashboards to give the most complete picture during a forensic investigation and to create custom workflows.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform/Data%20Connectors/WireXsystemsNFP%281b%29.json","true" -"CommonSecurityLog","WireX Network Forensics Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform","wirexsystems1584682625009","wirex_network_forensics_platform_mss","2022-05-06","","","WireX Systems","Partner","https://wirexsystems.com/contact-us/","","domains","WireX_Systems_NFPAma","WireX_Systems","[Deprecated] WireX Network Forensics Platform via AMA","The WireX Systems data connector allows security professional to integrate with Microsoft Sentinel to allow you to further enrich your forensics investigations; to not only encompass the contextual content offered by WireX but to analyze data from other sources, and to create custom dashboards to 
give the most complete picture during a forensic investigation and to create custom workflows.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform/Data%20Connectors/template_WireXsystemsNFPAMA.json","true" -"CommonSecurityLog","WithSecureElementsViaConnector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaConnector","withsecurecorporation","sentinel-solution-withsecure-via-connector","2022-11-03","2022-11-03","","WithSecure","Partner","https://www.withsecure.com/en/support","","domains","WithSecureElementsViaConnector","WithSecure","[Deprecated] WithSecure Elements via Connector","WithSecure Elements is a unified cloud-based cyber security platform.
By connecting WithSecure Elements via Connector to Microsoft Sentinel, security events can be received in Common Event Format (CEF) over syslog.
It requires deploying ""Elements Connector"" either on-prem or in cloud.
The Common Event Format (CEF) provides natively search & correlation, alerting and threat intelligence enrichment for each data log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaConnector/Data%20Connectors/WithSecureElementsViaConnector.json","true" -"WsSecurityEvents_CL","WithSecureElementsViaFunction","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaFunction","withsecurecorporation","sentinel-solution-withsecure-via-function","2024-02-22","2025-04-25","","WithSecure","Partner","https://www.withsecure.com/en/support","","domains","WithSecureElementsViaFunction","WithSecure","WithSecure Elements API (Azure Function)","WithSecure Elements is the unified cloud-based cyber security platform designed to reduce risk, complexity, and inefficiency.

Elevate your security from your endpoints to your cloud applications. Arm yourself against every type of cyber threat, from targeted attacks to zero-day ransomware.

WithSecure Elements combines powerful predictive, preventive, and responsive security capabilities - all managed and monitored through a single security center. Our modular structure and flexible pricing models give you the freedom to evolve. With our expertise and insight, you'll always be empowered - and you'll never be alone.

With Microsoft Sentinel integration, you can correlate [security events](https://connect.withsecure.com/api-reference/security-events#overview) data from the WithSecure Elements solution with data from other sources, enabling a rich overview of your entire environment and faster reaction to threats.

With this solution Azure Function is deployed to your tenant, polling periodically for the WithSecure Elements security events.

For more information visit our website at: [https://www.withsecure.com](https://www.withsecure.com).","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaFunction/Data%20Connectors/WithSecureElementsViaFunction.json","true" -"WizAuditLogsV2_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" -"WizAuditLogs_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" -"WizIssuesV2_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" -"WizIssues_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, 
Vulnerability Findings, and Audit logs to Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" -"WizVulnerabilitiesV2_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" -"WizVulnerabilities_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" -"ASimAuditEventLogs","Workday","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workday","azuresentinel","azure-sentinel-solution-workday","2024-02-15","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","WorkdayCCPDefinition","Microsoft","Workday User Activity","The [Workday](https://www.workday.com/) User Activity data connector provides the capability to ingest User Activity Logs from [Workday API](https://community.workday.com/sites/default/files/file-hosting/restapi/index.html#privacy/v1/get-/activityLogging) into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workday/Data%20Connectors/Workday_ccp/Workday_DataConnectorDefinition.json","true" -"Workplace_Facebook_CL","Workplace from 
Facebook","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workplace%20from%20Facebook","azuresentinel","azure-sentinel-solution-workplacefromfacebook","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","WorkplaceFacebook","Facebook","Workplace from Facebook","The [Workplace](https://www.workplace.com/) data connector provides the capability to ingest common Workplace events into Microsoft Sentinel through Webhooks. Webhooks enable custom integration apps to subscribe to events in Workplace and receive updates in real time. When a change occurs in Workplace, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.facebook.com/docs/workplace/reference/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workplace%20from%20Facebook/Data%20Connectors/WorkplaceFacebook/WorkplaceFacebook_Webhooks_FunctionApp.json","true" -"ZeroFoxAlertPoller_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxAlertsDefinition","ZeroFox Enterprise","ZeroFox Enterprise - Alerts (Polling CCF)","Collects alerts from ZeroFox API.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/Alerts/ZeroFoxAlerts_ConnectorDefinition.json","true" 
-"ZeroFox_CTI_C2_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_advanced_dark_web_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_botnet_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" 
-"ZeroFox_CTI_breaches_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_compromised_credentials_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_credit_cards_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" 
-"ZeroFox_CTI_dark_web_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_discord_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_disruption_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" 
-"ZeroFox_CTI_email_addresses_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_exploits_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_irc_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" 
-"ZeroFox_CTI_malware_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_national_ids_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_phishing_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" 
-"ZeroFox_CTI_phone_numbers_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_ransomware_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_telegram_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" 
-"ZeroFox_CTI_threat_actors_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_vulnerabilities_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZNSegmentAuditNativePoller_CL","ZeroNetworks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroNetworks","zeronetworksltd1629013803351","azure-sentinel-solution-znsegmentaudit","2022-06-06","2025-09-17","","Zero Networks","Partner","https://zeronetworks.com","","domains","ZeroNetworksSegmentAuditNativePoller","Zero Networks","Zero Networks Segment Audit","The [Zero Networks Segment](https://zeronetworks.com/) Audit data connector provides the capability to ingest Zero Networks Audit events into Microsoft Sentinel through the REST API. 
This data connector uses Microsoft Sentinel native polling capability.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroNetworks/Data%20Connectors/SegmentNativePollerConnector/azuredeploy_ZeroNetworks_Segment_native_poller_connector.json","true" -"","ZeroTrust(TIC3.0)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroTrust%28TIC3.0%29","azuresentinel","azure-sentinel-solution-zerotrust","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","false" -"ZimperiumMitigationLog_CL","Zimperium Mobile Threat Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense","zimperiuminc","zimperium_mobile_threat_defense_mss","2022-05-02","","","Zimperium","Partner","https://www.zimperium.com/support/","","domains","ZimperiumMtdAlerts","Zimperium","Zimperium Mobile Threat Defense","Zimperium Mobile Threat Defense connector gives you the ability to connect the Zimperium threat log with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's mobile threat landscape and enhances your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense/Data%20Connectors/Zimperium%20MTD%20Alerts.json","true" -"ZimperiumThreatLog_CL","Zimperium Mobile Threat Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense","zimperiuminc","zimperium_mobile_threat_defense_mss","2022-05-02","","","Zimperium","Partner","https://www.zimperium.com/support/","","domains","ZimperiumMtdAlerts","Zimperium","Zimperium Mobile Threat Defense","Zimperium Mobile Threat Defense connector gives you the ability to connect the Zimperium threat log with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's mobile threat landscape and enhances your security operation capabilities.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense/Data%20Connectors/Zimperium%20MTD%20Alerts.json","true" -"","Zinc Open Source","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zinc%20Open%20Source","azuresentinel","azure-sentinel-solution-zincopensource","2022-10-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","false" -"Zoom_CL","ZoomReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZoomReports","azuresentinel","azure-sentinel-solution-zoomreports","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","Zoom","Zoom","Zoom Reports","The [Zoom](https://zoom.us/) Reports data connector provides the capability to ingest [Zoom Reports](https://developers.zoom.us/docs/api/rest/reference/zoom-api/methods/#tag/Reports) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developers.zoom.us/docs/api/) for more information. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZoomReports/Data%20Connectors/ZoomReports_API_FunctionApp.json","true" -"CommonSecurityLog","Zscaler Internet Access","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Internet%20Access","zscaler1579058425289","zscaler_internet_access_mss","2022-05-25","","","Zscaler","Partner","https://help.zscaler.com/submit-ticket-links","","domains","Zscaler","Zscaler","[Deprecated] Zscaler via Legacy Agent","The Zscaler data connector allows you to easily connect your Zscaler Internet Access (ZIA) logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. Using Zscaler on Microsoft Sentinel will provide you more insights into your organization’s Internet usage, and will enhance its security operation capabilities.​","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Internet%20Access/Data%20Connectors/template_Zscaler.JSON","true" -"CommonSecurityLog","Zscaler Internet Access","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Internet%20Access","zscaler1579058425289","zscaler_internet_access_mss","2022-05-25","","","Zscaler","Partner","https://help.zscaler.com/submit-ticket-links","","domains","ZscalerAma","Zscaler","[Deprecated] Zscaler via AMA","The Zscaler data connector allows you to easily connect your Zscaler Internet Access (ZIA) logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
Using Zscaler on Microsoft Sentinel will provide you more insights into your organization’s Internet usage, and will enhance its security operation capabilities.​","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Internet%20Access/Data%20Connectors/template_ZscalerAma.JSON","true" -"ZPA_CL","Zscaler Private Access (ZPA)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Private%20Access%20%28ZPA%29","azuresentinel","azure-sentinel-solution-zscalerprivateaccess","2022-01-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ZscalerPrivateAccess","Zscaler","[Deprecated] Zscaler Private Access","The [Zscaler Private Access (ZPA)](https://help.zscaler.com/zpa/what-zscaler-private-access) data connector provides the capability to ingest [Zscaler Private Access events](https://help.zscaler.com/zpa/log-streaming-service) into Microsoft Sentinel. Refer to [Zscaler Private Access documentation](https://help.zscaler.com/zpa) for more information.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Private%20Access%20%28ZPA%29/Data%20Connectors/Connector_LogAnalytics_agent_Zscaler_ZPA.json","true" -"NCProtectUAL_CL","archTIS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/archTIS","nucleuscyber","nc-protect-azure-sentinel-data-connector","2021-10-20","","","archTIS","Partner","https://www.archtis.com/nc-protect-support/","","domains","NucleusCyberNCProtect","archTIS","NC Protect","[NC Protect Data Connector (archtis.com)](https://info.archtis.com/get-started-with-nc-protect-sentinel-data-connector) provides the capability to ingest user activity logs and events into Microsoft Sentinel. 
The connector provides visibility into NC Protect user activity logs and events in Microsoft Sentinel to improve monitoring and investigation capabilities","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/archTIS/Data%20Connectors/NucleusCyberNCProtect.json","true" -"CommonSecurityLog","iboss","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss","iboss","iboss-sentinel-connector","2022-02-15","","","iboss","Partner","https://www.iboss.com/contact-us/","","domains","iboss","iboss","[Deprecated] iboss via Legacy Agent","The [iboss](https://www.iboss.com) data connector enables you to seamlessly connect your Threat Console to Microsoft Sentinel and enrich your instance with iboss URL event logs. Our logs are forwarded in Common Event Format (CEF) over Syslog and the configuration required can be completed on the iboss platform without the use of a proxy. Take advantage of our connector to garner critical data points and gain insight into security threats.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss/Data%20Connectors/iboss_cef.json","true" -"CommonSecurityLog","iboss","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss","iboss","iboss-sentinel-connector","2022-02-15","","","iboss","Partner","https://www.iboss.com/contact-us/","","domains","ibossAma","iboss","iboss via AMA","The [iboss](https://www.iboss.com) data connector enables you to seamlessly connect your Threat Console to Microsoft Sentinel and enrich your instance with iboss URL event logs. Our logs are forwarded in Common Event Format (CEF) over Syslog and the configuration required can be completed on the iboss platform without the use of a proxy. 
Take advantage of our connector to garner critical data points and gain insight into security threats.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss/Data%20Connectors/template_ibossAMA.json","true" -"CommonSecurityLog","vArmour Application Controller","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller","varmournetworks","varmour_sentinel","2022-06-01","","","vArmour Networks","Partner","https://www.varmour.com/contact-us/","","domains","vArmourAC","vArmour","[Deprecated] vArmour Application Controller via Legacy Agent","vArmour reduces operational risk and increases cyber resiliency by visualizing and controlling application relationships across the enterprise. This vArmour connector enables streaming of Application Controller Violation Alerts into Microsoft Sentinel, so you can take advantage of search & correlation, alerting, & threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller/Data%20Connectors/Connector_vArmour_AppController_CEF.json","true" -"CommonSecurityLog","vArmour Application Controller","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller","varmournetworks","varmour_sentinel","2022-06-01","","","vArmour Networks","Partner","https://www.varmour.com/contact-us/","","domains","vArmourACAma","vArmour","[Deprecated] vArmour Application Controller via AMA","vArmour reduces operational risk and increases cyber resiliency by visualizing and controlling application relationships across the enterprise. 
This vArmour connector enables streaming of Application Controller Violation Alerts into Microsoft Sentinel, so you can take advantage of search & correlation, alerting, & threat intelligence enrichment for each log.","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller/Data%20Connectors/template_vArmour_AppControllerAMA.json","true" +"Table","solution_name","solution_folder","solution_publisher_id","solution_offer_id","solution_first_publish_date","solution_last_publish_date","solution_version","solution_support_name","solution_support_tier","solution_support_link","solution_author_name","solution_categories","connector_id","connector_publisher","connector_title","connector_description","connector_instruction_steps","connector_permissions","connector_files","is_unique" +"OnePasswordEventLogs_CL","1Password","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password","1password1617200969773","azure-sentinel-solution-1password","2023-12-01","","","1Password","Partner","https://support.1password.com/","","domains","1Password","1Password","1Password","The [1Password](https://www.1password.com) solution for Microsoft Sentinel enables you to ingest 1Password logs and events into Microsoft Sentinel. The connector provides visibility into 1Password Events and Alerts in Microsoft Sentinel to improve monitoring and investigation capabilities.

**Underlying Microsoft Technologies used:**

This solution takes a dependency on the following technologies, and some of these dependencies either may be in [Preview](https://azure.microsoft.com/support/legal/preview-supplemental-terms/) state or might result in additional ingestion or operational costs:

- [Azure Functions](https://azure.microsoft.com/services/functions/#overview)","[{""description"": "">**NOTE:** This connector uses Azure Functions to connect to 1Password to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**STEP 1 - Configuration steps for the 1Password API**\n\n [Follow these instructions](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) provided by 1Password to obtain an API Token. **Note:** A 1Password account is required""}, {""description"": ""**STEP 2 - Deploy the functionApp using DeployToAzure button to create the table, dcr and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 1Password connector, a custom table needs to be created.""}, {""description"": ""This method provides an automated deployment of the 1Password connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OnePassword-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name**, **Workspace Name**, **API Key**, and **URI**.\n - The default **Time Interval** is set to pull the last five (5) minutes of data. 
If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.\n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."", ""title"": ""Option 1 - Azure Resource Manager (ARM) Template""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""1Password API Token"", ""description"": ""A 1Password API Token is required. [See the documentation to learn more about the 1Password API](https://developer.1password.com/docs/events-api/reference). 
**Note:** A 1Password account is required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password/Data%20Connectors/deployment/1Password_data_connector.json","true" +"OnePasswordEventLogs_CL","1Password","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password","1password1617200969773","azure-sentinel-solution-1password","2023-12-01","","","1Password","Partner","https://support.1password.com/","","domains","1Password","1Password","1Password","The [1Password](https://www.1password.com) solution for Microsoft Sentinel enables you to ingest sign-in attempts, item usage, and audit events from your 1Password Business account using the [1Password Events Reporting API](https://developer.1password.com/docs/events-api). This allows you to monitor and investigate events in 1Password in Microsoft Sentinel along with the other applications and services your organization uses.

**Underlying Microsoft Technologies used:**

This solution depends on the following technologies, and some of which may be in [Preview](https://azure.microsoft.com/support/legal/preview-supplemental-terms/) state or may incur additional ingestion or operational costs:

- [Azure Functions](https://azure.microsoft.com/services/functions/#overview)","[{""description"": "">**NOTE:** This connector uses Azure Functions to connect to 1Password to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs from Azure. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**STEP 1 - Configuration steps for the 1Password Events Reporting API**\n\n [Follow these instructions](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) provided by 1Password to obtain an Events Reporting API Token. **Note:** A 1Password Business account is required""}, {""description"": ""**STEP 2 - Deploy the functionApp using DeployToAzure button to create the table, dcr and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 1Password connector, a custom table needs to be created.""}, {""description"": ""This method provides an automated deployment of the 1Password connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OnePassword-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name**, **Workspace Name**, **1Password Events API Key**, and **URI**.\n - The default **Time Interval** is set to five (5) minutes. 
If you'd like to modify the interval, you can adjust the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.\n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."", ""title"": ""Option 1 - Azure Resource Manager (ARM) Template""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""1Password Events API Token"", ""description"": ""A 1Password Events API Token is required. [See the documentation to learn more about the 1Password API](https://developer.1password.com/docs/events-api/reference). 
\n\n**Note:** A 1Password Business account is required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password/Data%20Connectors/1Password_API_FunctionApp.json","true" +"OnePasswordEventLogs_CL","1Password","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password","1password1617200969773","azure-sentinel-solution-1password","2023-12-01","","","1Password","Partner","https://support.1password.com/","","domains","1PasswordCCPDefinition","1Password","1Password (Serverless)","The 1Password CCP connector allows the user to ingest 1Password Audit, Signin & ItemUsage events into Microsoft Sentinel.","[{""title"": ""STEP 1 - Create a 1Password API token:"", ""description"": ""Follow the [1Password documentation](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) for guidance on this step.""}, {""title"": ""STEP 2 - Choose the correct base URL:"", ""description"": ""There are multiple 1Password servers which might host your events. The correct server depends on your license and region. Follow the [1Password documentation](https://developer.1password.com/docs/events-api/reference/#servers) to choose the correct server. 
Input the base URL as displayed by the documentation (including 'https://' and without a trailing '/').""}, {""title"": ""STEP 3 - Enter your 1Password Details:"", ""description"": ""Enter the 1Password base URL & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Base Url"", ""placeholder"": ""Enter your Base Url"", ""type"": ""text"", ""name"": ""BaseUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""Enter your API Token"", ""type"": ""password"", ""name"": ""ApiToken""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""1Password API token"", ""description"": ""A 1Password API Token is required. 
See the [1Password documentation](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) on how to create an API token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password/Data%20Connectors/1Password_ccpv2/1Password_DataConnectorDefinition.json","true" +"apifirewall_log_1_CL","42Crunch API Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/42Crunch%20API%20Protection","42crunch1580391915541","42crunch_sentinel_solution","2022-09-21","","","42Crunch API Protection","Partner","https://42crunch.com/","","domains","42CrunchAPIProtection","42Crunch","API Protection","Connects the 42Crunch API protection to Azure Log Analytics via the REST API interface","[{""title"": ""Step 1 : Read the detailed documentation"", ""description"": ""The installation process is documented in great detail in the GitHub repository [Microsoft Sentinel integration](https://github.com/42Crunch/azure-sentinel-integration). The user should consult this repository further to understand installation and debug of the integration.""}, {""title"": ""Step 2: Retrieve the workspace access credentials"", ""description"": ""The first installation step is to retrieve both your **Workspace ID** and **Primary Key** from the Microsoft Sentinel platform.\nCopy the values shown below and save them for configuration of the API log forwarder integration."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Step 3: Install the 42Crunch protection and log forwarder"", ""description"": ""The next step is to install the 42Crunch protection and log forwarder to protect your API. Both components are available as containers from the [42Crunch repository](https://hub.docker.com/u/42crunch). 
The exact installation will depend on your environment, consult the [42Crunch protection documentation](https://docs.42crunch.com/latest/content/concepts/api_firewall_deployment_architecture.htm) for full details. Two common installation scenarios are described below:\n"", ""innerSteps"": [{""title"": ""Installation via Docker Compose"", ""description"": ""The solution can be installed using a [Docker compose file](https://github.com/42Crunch/azure-sentinel-integration/blob/main/sample-deployment/docker-compose.yml).""}, {""title"": ""Installation via Helm charts"", ""description"": ""The solution can be installed using a [Helm chart](https://github.com/42Crunch/azure-sentinel-integration/tree/main/helm/sentinel).""}]}, {""title"": ""Step 4: Test the data ingestion"", ""description"": ""In order to test the data ingestion the user should deploy the sample *httpbin* application alongside the 42Crunch protection and log forwarder [described in detail here](https://github.com/42Crunch/azure-sentinel-integration/tree/main/sample-deployment)."", ""innerSteps"": [{""title"": ""4.1 Install the sample"", ""description"": ""The sample application can be installed locally using a [Docker compose file](https://github.com/42Crunch/azure-sentinel-integration/blob/main/sample-deployment/docker-compose.yml) which will install the httpbin API server, the 42Crunch API protection and the Microsoft Sentinel log forwarder. Set the environment variables as required using the values copied from step 2.""}, {""title"": ""4.2 Run the sample"", ""description"": ""Verify the API protection is connected to the 42Crunch platform, and then exercise the API locally on the *localhost* at port 8080 using Postman, curl, or similar. You should see a mixture of passing and failing API calls. 
""}, {""title"": ""4.3 Verify the data ingestion on Log Analytics"", ""description"": ""After approximately 20 minutes access the Log Analytics workspace on your Microsoft Sentinel installation, and locate the *Custom Logs* section verify that a *apifirewall_log_1_CL* table exists. Use the sample queries to examine the data.""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/42Crunch%20API%20Protection/Data%20Connectors/42CrunchAPIProtection.json","true" +"CommonSecurityLog","AI Analyst Darktrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace","darktrace1655286944672","darktrace_mss","2022-05-02","","","Darktrace","Partner","https://www.darktrace.com/en/contact/","","domains","Darktrace","Darktrace","[Deprecated] AI Analyst Darktrace via Legacy Agent","The Darktrace connector lets users connect Darktrace Model Breaches in real-time with Microsoft Sentinel, allowing creation of custom Dashboards, Workbooks, Notebooks and Custom Alerts to improve investigation. Microsoft Sentinel's enhanced visibility into Darktrace logs enables monitoring and mitigation of security threats.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure Darktrace to forward Syslog messages in CEF format to your Azure workspace via the Syslog agent. \n\n 1) Within the Darktrace Threat Visualizer, navigate to the System Config page in the main menu under Admin. \n\n 2) From the left-hand menu, select Modules and choose Microsoft Sentinel from the available Workflow Integrations.\\n 3) A configuration window will open. 
Locate Microsoft Sentinel Syslog CEF and click New to reveal the configuration settings, unless already exposed. \n\n 4) In the Server configuration field, enter the location of the log forwarder and optionally modify the communication port. Ensure that the port selected is set to 514 and is allowed by any intermediary firewalls. \n\n 5) Configure any alert thresholds, time offsets or additional settings as required. \n\n 6) Review any additional configuration options you may wish to enable that alter the Syslog syntax.\n\n 7) Enable Send Alerts and save your changes.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace/Data%20Connectors/AIA-Darktrace.json","true" +"CommonSecurityLog","AI Analyst Darktrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace","darktrace1655286944672","darktrace_mss","2022-05-02","","","Darktrace","Partner","https://www.darktrace.com/en/contact/","","domains","DarktraceAma","Darktrace","[Deprecated] AI Analyst Darktrace via AMA","The Darktrace connector lets users connect Darktrace Model Breaches in real-time with Microsoft Sentinel, allowing creation of custom Dashboards, Workbooks, Notebooks and Custom Alerts to improve investigation. Microsoft Sentinel's enhanced visibility into Darktrace logs enables monitoring and mitigation of security threats.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. 
Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure Darktrace to forward Syslog messages in CEF format to your Azure workspace via the Syslog agent. \n\n 1) Within the Darktrace Threat Visualizer, navigate to the System Config page in the main menu under Admin. \n\n 2) From the left-hand menu, select Modules and choose Microsoft Sentinel from the available Workflow Integrations.\\n 3) A configuration window will open. Locate Microsoft Sentinel Syslog CEF and click New to reveal the configuration settings, unless already exposed. \n\n 4) In the Server configuration field, enter the location of the log forwarder and optionally modify the communication port. Ensure that the port selected is set to 514 and is allowed by any intermediary firewalls. \n\n 5) Configure any alert thresholds, time offsets or additional settings as required. \n\n 6) Review any additional configuration options you may wish to enable that alter the Syslog syntax.\n\n 7) Enable Send Alerts and save your changes.""}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace/Data%20Connectors/template_AIA-DarktraceAMA.json","true" +"AIShield_CL","AIShield AI Security Monitoring","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AIShield%20AI%20Security%20Monitoring","rbei","bgsw_aishield_sentinel","2022-01-11","2025-03-06","","AIShield","Partner","https://azuremarketplace.microsoft.com/marketplace/apps/rbei.bgsw_aishield_product/","","domains","BoschAIShield","Bosch","AIShield","[AIShield](https://www.boschaishield.com/) connector allows users to connect with AIShield custom defense mechanism logs with Microsoft Sentinel, allowing the creation of dynamic Dashboards, Workbooks, Notebooks and tailored Alerts to improve investigation and thwart attacks on AI systems. 
It gives users more insight into their organization's AI assets security posturing and improves their AI systems security operation capabilities.AIShield.GuArdIan analyzes the LLM generated content to identify and mitigate harmful content, safeguarding against legal, policy, role based, and usage based violations","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**AIShield**](https://aka.ms/sentinel-boschaishield-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": ""\n>**IMPORTANT:** Before deploying the AIShield Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Note"", ""description"": ""Users should have utilized AIShield SaaS offering to conduct vulnerability analysis and deployed custom defense mechanisms generated along with their AI asset. 
[**Click here**](https://azuremarketplace.microsoft.com/marketplace/apps/rbei.bgsw_aishield_product) to know more or get in touch.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AIShield%20AI%20Security%20Monitoring/Data%20Connectors/AIShieldConnector.json","true" +"Event","ALC-WebCTRL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ALC-WebCTRL","azuresentinel","azure-sentinel-solution-automated-logic-webctrl","2021-11-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AutomatedLogicWebCTRL","AutomatedLogic","Automated Logic WebCTRL ","You can stream the audit logs from the WebCTRL SQL server hosted on Windows machines connected to your Microsoft Sentinel. This connection enables you to view dashboards, create custom alerts and improve investigation. This gives insights into your Industrial Control Systems that are monitored or controlled by the WebCTRL BAS application.","[{""title"": ""1. Install and onboard the Microsoft agent for Windows."", ""description"": ""Learn about [agent setup](https://docs.microsoft.com/services-hub/health/mma-setup) and [windows events onboarding](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-windows-events). \n\n You can skip this step if you have already installed the Microsoft agent for Windows""}, {""title"": ""2. Configure Windows task to read the audit data and write it to windows events"", ""description"": ""Install and configure the Windows Scheduled Task to read the audit logs in SQL and write them as Windows Events. 
These Windows Events will be collected by the agent and forward to Microsoft Sentinel.\n\n> Notice that the data from all machines will be stored in the selected workspace"", ""innerSteps"": [{""title"": """", ""description"": ""2.1 Copy the [setup files](https://aka.ms/sentinel-automatedlogicwebctrl-tasksetup) to a location on the server.""}, {""title"": """", ""description"": ""2.2 Update the [ALC-WebCTRL-AuditPull.ps1](https://aka.ms/sentinel-automatedlogicwebctrl-auditpull) (copied in above step) script parameters like the target database name and windows event id's. Refer comments in the script for more details.""}, {""title"": """", ""description"": ""2.3 Update the windows task settings in the [ALC-WebCTRL-AuditPullTaskConfig.xml](https://aka.ms/sentinel-automatedlogicwebctrl-auditpulltaskconfig) file that was copied in above step as per requirement. Refer comments in the file for more details.""}, {""title"": """", ""description"": ""2.4 Install windows tasks using the updated configs copied in the above steps"", ""instructions"": [{""parameters"": {""label"": ""Run the following command in powershell from the directory where the setup files are copied in step 2.1"", ""value"": ""schtasks.exe /create /XML \""ALC-WebCTRL-AuditPullTaskConfig.xml\"" /tn \""ALC-WebCTRL-AuditPull\""""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the Event schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, validate below steps for any run time issues:\n\n> 1. Make sure that the scheduled task is created and is in running state in the Windows Task Scheduler.\n\n>2. Check for task execution errors in the history tab in Windows Task Scheduler for the newly created task in step 2.4\n\n>3. 
Make sure that the SQL Audit table consists new records while the scheduled windows task runs.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ALC-WebCTRL/Data%20Connectors/Connector_WindowsEvents_WebCTRL.json","true" +"ARGOS_CL","ARGOSCloudSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ARGOSCloudSecurity","argoscloudsecurity1605618416175","argos-sentinel","2022-08-16","","","ARGOS Cloud Security","Partner","https://argos-security.io/contact-us","","domains","ARGOSCloudSecurity","ARGOS Cloud Security","ARGOS Cloud Security","The ARGOS Cloud Security integration for Microsoft Sentinel allows you to have all your important cloud security events in one place. This enables you to easily create dashboards, alerts, and correlate events across multiple systems. Overall this will improve your organization's security posture and security incident response.","[{""title"": ""1. Subscribe to ARGOS"", ""description"": ""Ensure you already own an ARGOS Subscription. 
If not, browse to [ARGOS Cloud Security](https://argos-security.io) and sign up to ARGOS.\n\nAlternatively, you can also purchase ARGOS via the [Azure Marketplace](https://azuremarketplace.microsoft.com/en-au/marketplace/apps/argoscloudsecurity1605618416175.argoscloudsecurity?tab=Overview).""}, {""title"": ""2. Configure Sentinel integration from ARGOS"", ""description"": ""Configure ARGOS to forward any new detections to your Sentinel workspace by providing ARGOS with your Workspace ID and Primary Key.\n\nThere is **no need to deploy any custom infrastructure**.\n\nEnter the information into the [ARGOS Sentinel](https://app.argos-security.io/account/sentinel) configuration page.\n\nNew detections will automatically be forwarded.\n\n[Learn more about the integration](https://www.argos-security.io/resources#integrations)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ARGOSCloudSecurity/Data%20Connectors/Connector_ARGOS.json","true" +"AWSCloudFront_AccessLog_CL","AWS CloudFront","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20CloudFront","azuresentinel","azure-sentinel-solution-aws-cloudfront","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsCloudfrontCcpDefinition","Microsoft","Amazon Web Services CloudFront (via Codeless Connector Framework) (Preview)","This data connector enables the integration of AWS CloudFront logs with Microsoft Sentinel to support advanced threat detection, investigation, and security monitoring. By utilizing Amazon S3 for log storage and Amazon SQS for message queuing, the connector reliably ingests CloudFront access logs into Microsoft Sentinel","[{""title"": ""Ingesting AWS CloudFront logs in Microsoft Sentinel"", ""description"": ""### List of Resources Required:\n\n* Open ID Connect (OIDC) web identity provider\n* IAM Role\n* Amazon S3 Bucket\n* Amazon SQS\n* AWS CloudFront configuration\n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. 
Choose the \u2018**Specify template**\u2019 option, then \u2018**Upload a template file**\u2019 by clicking on \u2018**Choose file**\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018**Choose file**\u2019 and select the downloaded template. \n 3. Click '**Next**' and '**Create stack**'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWSCloudFront resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSCloudFront""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", 
""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20CloudFront/Data%20Connectors/AWSCloudFrontLog_CCF/AWSCloudFrontLog_ConnectorDefinition.json","true" +"AWSSecurityHubFindings","AWS Security Hub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20Security%20Hub","azuresentinel","azure-sentinel-solution-awssecurityhub","2025-03-12","2025-03-12","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsSecurityHubFindingsCcpDefinition","Microsoft","AWS Security Hub Findings (via Codeless Connector Framework)","This connector enables the ingestion of AWS Security Hub Findings, which are collected in AWS S3 buckets, into Microsoft Sentinel. It helps streamline the process of monitoring and managing security alerts by integrating AWS Security Hub Findings with Microsoft Sentinel's advanced threat detection and response capabilities.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""### 1. AWS CloudFormation Deployment \n Use the provided CloudFormation templates to configure the AWS environment for sending logs from AWS Security Hub to your Log Analytics Workspace.\n""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Deploy CloudFormation Templates in AWS: \n1. Navigate to the [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create).\n2. Click **Create stack** and select **With new resources**.\n3. Choose **Upload a template file**, then click **Choose file** to upload the appropriate CloudFormation template provided.\n4. Follow the prompts and click **Next** to complete the stack creation.\n5. 
After the stacks are created, note down the **Role ARN** and **SQS Queue URL**.\n""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID Connect authentication provider deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Security Hub resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AwsSecurityHub""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""### 2. Connect new collectors \n To enable AWS Security Hub Connector for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS Security Hub connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions 
to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": false, ""write"": false, ""delete"": false, ""action"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: AWS Security Hub, Amazon Data Firehose, Amazon EventBridge, S3 Bucket, Simple Queue Service (SQS), IAM roles and permissions policies.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20Security%20Hub/Data%20Connectors/AWSSecurityHubFindings_CCP/AWSSecurityHubFindings_DataConnectorDefinition.json","true" +"","AWS Systems Manager","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20Systems%20Manager","azuresentinel","azure-sentinel-solution-awssystemsmanager","","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"AWSVPCFlow","AWS VPC Flow Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20VPC%20Flow%20Logs","azuresentinel","azure-sentinel-solution-awsvpcflowlogs","2025-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AWSS3VPCFlowLogsParquetDefinition","Microsoft","Amazon Web Services S3 VPC Flow Logs","This connector allows you to ingest AWS VPC Flow Logs, collected in AWS S3 buckets, to Microsoft Sentinel. AWS VPC Flow Logs provide visibility into network traffic within your AWS Virtual Private Cloud (VPC), enabling security analysis and network monitoring.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates have been generated to set up the AWS environment to send VPC Flow Logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create a Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. Click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS VPC Flow Logs resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AwsVPCFlow""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill in the required information and click on 'Connect'""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.fileFormat"", ""columnName"": ""File Format""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS VPC Flow Logs connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""fileFormat"", ""required"": true, ""placeholder"": ""Select a file format"", ""options"": [{""key"": ""Json"", ""text"": ""JSON Format""}, {""key"": ""Parquet"", ""text"": ""Parquet Format""}, {""key"": ""Csv"", ""text"": ""CSV Format""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20VPC%20Flow%20Logs/Data%20Connectors/AWSVPCFlowLogs_CCP/AWSVPCFlowLogs_DataConnectorDefinition.json","true" 
+"","AWSAthena","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWSAthena","azuresentinel","azure-sentinel-solution-awsathena","2022-11-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"AWSS3ServerAccess","AWS_AccessLogs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS_AccessLogs","azuresentinel","azure-sentinel-solution-awsaccesslogs","2025-02-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3ServerAccessLogsDefinition","Microsoft","AWS S3 Server Access Logs (via Codeless Connector Framework)","This connector allows you to ingest AWS S3 Server Access Logs into Microsoft Sentinel. These logs contain detailed records for requests made to S3 buckets, including the type of request, resource accessed, requester information, and response details. These logs are useful for analyzing access patterns, debugging issues, and ensuring security compliance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an AWS S3 Server Access logs to your Log Analytics Workspace.\n""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Deploy CloudFormation Templates in AWS: \n1. Navigate to the [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create).\n2. Click **Create stack** and select **With new resources**.\n3. Choose **Upload a template file**, then click **Choose file** to upload the appropriate CloudFormation template provided.\n4. Follow the prompts and click **Next** to complete the stack creation.\n5. 
After the stacks are created, note down the **Role ARN** and **SQS Queue URL**.\n""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID Connect authentication provider deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Server Access resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSS3ServerAccess""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""### 2. Connect new collectors \n To enable AWS S3 Server Access Logs Connector for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new collector"", ""subtitle"": ""AWS Server Access Logs connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": 
""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": false, ""write"": false, ""delete"": false, ""action"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3 Bucket, Simple Queue Service (SQS), IAM roles and permissions policies.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS_AccessLogs/Data%20Connectors/AwsS3ServerAccessLogsDefinition_CCP/AWSS3ServerAccessLogs_ConnectorDefinition.json","true" +"","AWS_IAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS_IAM","azuresentinel","azure-sentinel-solution-amazonwebservicesiam","2022-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"ABNORMAL_CASES_CL","AbnormalSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity","abnormalsecuritycorporation1593011233180","fe1b4806-215b-4610-bf95-965a7a65579c","2021-10-20","","","Abnormal Security","Partner","https://abnormalsecurity.com/contact","","domains","AbnormalSecurity","AbnormalSecurity","AbnormalSecurity ","The Abnormal Security data connector provides the capability to ingest threat and case logs into Microsoft Sentinel using the [Abnormal Security Rest API.](https://app.swaggerhub.com/apis/abnormal-security/abx/)","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Abnormal Security's REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Abnormal Security API**\n\n [Follow these instructions](https://app.swaggerhub.com/apis/abnormal-security/abx) provided by Abnormal Security to configure the REST API integration. **Note:** An Abnormal Security account is required""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Abnormal Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Abnormal Security API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the Abnormal Security connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-abnormalsecurity-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace ID**, **Microsoft Sentinel Shared Key** and **Abnormal Security REST API Key**.\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.\n 4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Abnormal Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-abnormalsecurity-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. AbnormalSecurityXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSENTINEL_WORKSPACE_ID\n\t\tSENTINEL_SHARED_KEY\n\t\tABNORMAL_SECURITY_REST_API_TOKEN\n\t\tlogAnalyticsUri (optional)\n(add any other settings required by the Function App)\nSet the `uri` value to: `` \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us.` \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Abnormal Security API Token"", ""description"": ""An Abnormal Security API Token is required. [See the documentation to learn more about Abnormal Security API](https://app.swaggerhub.com/apis/abnormal-security/abx/). **Note:** An Abnormal Security account is required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity/Data%20Connectors/AbnormalSecurity_API_FunctionApp.json","true" +"ABNORMAL_THREAT_MESSAGES_CL","AbnormalSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity","abnormalsecuritycorporation1593011233180","fe1b4806-215b-4610-bf95-965a7a65579c","2021-10-20","","","Abnormal Security","Partner","https://abnormalsecurity.com/contact","","domains","AbnormalSecurity","AbnormalSecurity","AbnormalSecurity ","The Abnormal Security data connector provides the capability to ingest threat and case logs into Microsoft Sentinel using the [Abnormal Security Rest API.](https://app.swaggerhub.com/apis/abnormal-security/abx/)","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Abnormal Security's REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Abnormal Security API**\n\n [Follow these instructions](https://app.swaggerhub.com/apis/abnormal-security/abx) provided by Abnormal Security to configure the REST API integration. **Note:** An Abnormal Security account is required""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Abnormal Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Abnormal Security API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the Abnormal Security connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-abnormalsecurity-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace ID**, **Microsoft Sentinel Shared Key** and **Abnormal Security REST API Key**.\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.\n 4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Abnormal Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-abnormalsecurity-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. AbnormalSecurityXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSENTINEL_WORKSPACE_ID\n\t\tSENTINEL_SHARED_KEY\n\t\tABNORMAL_SECURITY_REST_API_TOKEN\n\t\tlogAnalyticsUri (optional)\n(add any other settings required by the Function App)\nSet the `uri` value to: `` \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us.` \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Abnormal Security API Token"", ""description"": ""An Abnormal Security API Token is required. [See the documentation to learn more about Abnormal Security API](https://app.swaggerhub.com/apis/abnormal-security/abx/). **Note:** An Abnormal Security account is required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity/Data%20Connectors/AbnormalSecurity_API_FunctionApp.json","true" +"","AbuseIPDB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbuseIPDB","azuresentinel","azure-sentinel-solution-abuseipdb","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Acronis Cyber Protect Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Acronis%20Cyber%20Protect%20Cloud","acronisinternationalgmbh","azure-sentinel-solution-acronis-cyber-protect","2025-10-28","2025-10-28","","Acronis International GmbH","Partner","https://www.acronis.com/en/support","","domains,verticals","","","","","","","","false" +"agari_apdpolicy_log_CL","Agari","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari","agari","Agari_MSS","2022-05-02","","","Agari","Partner","https://support.agari.com/hc/en-us/articles/360000645632-How-to-access-Agari-Support","","domains","Agari","Agari","Agari Phishing Defense and Brand Protection","This connector uses a Agari REST API connection to push 
data into Azure Sentinel Log Analytics.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Agari APIs to pull its logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""STEP 1 - Get your Agari API credentials"", ""description"": ""\n1. Log into any Agari product (Client ID and Secret are the same for all applications) \n2. Click on your username in the upper right and select **Settings**\n3. Click on the **Generate API Secret** link to generate an API client_id and client_secret (the link will read **Regenerate API Secret** if you have already generated an API client ID/secret previously)\n4. Copy both the client_id and client_secret that are generated""}, {""title"": ""STEP 2 - (Optional) Enable the Security Graph API"", ""description"": ""Follow the instrcutions found on article [Connect Azure Sentinel to your threat intelligence platform](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence#connect-azure-sentinel-to-your-threat-intelligence-platform). 
Once the application is created you will need to record the Tenant ID, Client ID and Client Secret.""}, {""title"": ""STEP 3 - Deploy the connector and the associated Azure Function"", ""description"": ""\n>**IMPORTANT:** Before deploying the Agari Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Agari API credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Choose a deployement option"", ""description"": """"}, {""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-agari-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Agari Client ID**, **Agari Client Secret**, select `True` or `False` for the products you subscribe to, and if you wish to share IoCs with Sentinel, select `True` For **Enable Security Graph Sharing**, and enter the required IDs from the Azure Application.\n> - The Function App will request data from the Agari APIs every 5 minutes, corresponding to the Funciton App Timer.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
**NOTE:** Due to the use of Environment Variables to store log access times, the App requires 1 additonal manual step. In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": ""Option 2: Manual Deployment of Azure Functions"", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Click on **Code + Test** on the left pane. \n3. Copy the [Function App Code](https://aka.ms/sentinel-agari-functionapp) and paste into the Function App `run.ps1` editor.\n3. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following eight to twelve (8-12) application settings individually, with their respective string values (case-sensitive): \n\t\tclientID\n\t\tclientSecret\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\tenableBrandProtectionAPI\n\t\tenablePhishingResponseAPI\n\t\tenablePhishingDefenseAPI\n\t\tresGroup\n\t\tfunctionName\n\t\tsubId\n\t\tenableSecurityGraphSharing\n\t\t<--- Required if enableSecurityGraphSharing is set to true --->\n\t\tGraphTenantId\n\t\tGraphClientId\n\t\tGraphClientSecret\n\t\tlogAnalyticsUri (optional)\n> - Enter your Agari ClientID and Secret in 'clientId' and 'clientSecret'\n> - Enter 'true' or 'false' for 'enablePhishingDefense', 'enableBrandProtection', 'enablePhishingResponse' as per your product subscriptions.\n> - Enter your Resource Group name in resGroup, the name of the Function (from previous step) in functionName and your Subscription ID in subId.\n> - Enter 'true' or 'false' for 'enableSecurtyGraphAPI'. If you are enabling the Security Graph, the 'GraphTenantId','GraphClientId', and 'GraphClientSecret' is required.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n""}, {""title"": """", ""description"": ""**4. Set Permissions for the App**\n\n1. In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, set the status to On. \n\n2. Next, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": """", ""description"": ""**5. Complete Setup.**\n\n1. Once all application settings have been entered, click **Save**. 
Note that it will take some time to have the required dependencies download, so you may see some inital failure messages.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Agari Phishing Defense, Phishing Response or Brand Protection API Client ID and Secret"", ""description"": ""Ensure you have your Client ID and Secret keys. Instructions can be found on the [Agari Developers Site](https://developers.agari.com/agari-platform/docs/quick-start).""}, {""name"": ""(Optional) Microsoft Security Graph API"", ""description"": ""The Agari Function App has the ability to share threat intelleigence with Sentinel via the Security Graph API. To use this feature, you will need to enable the [Sentinel Threat Intelligence Platforms connector](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence) as well as register an application in Azure Active Directory. 
""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari/Data%20Connectors/Agari_API_FunctionApp.json","true" +"agari_apdtc_log_CL","Agari","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari","agari","Agari_MSS","2022-05-02","","","Agari","Partner","https://support.agari.com/hc/en-us/articles/360000645632-How-to-access-Agari-Support","","domains","Agari","Agari","Agari Phishing Defense and Brand Protection","This connector uses a Agari REST API connection to push data into Azure Sentinel Log Analytics.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Agari APIs to pull its logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""STEP 1 - Get your Agari API credentials"", ""description"": ""\n1. Log into any Agari product (Client ID and Secret are the same for all applications) \n2. Click on your username in the upper right and select **Settings**\n3. Click on the **Generate API Secret** link to generate an API client_id and client_secret (the link will read **Regenerate API Secret** if you have already generated an API client ID/secret previously)\n4. 
Copy both the client_id and client_secret that are generated""}, {""title"": ""STEP 2 - (Optional) Enable the Security Graph API"", ""description"": ""Follow the instrcutions found on article [Connect Azure Sentinel to your threat intelligence platform](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence#connect-azure-sentinel-to-your-threat-intelligence-platform). Once the application is created you will need to record the Tenant ID, Client ID and Client Secret.""}, {""title"": ""STEP 3 - Deploy the connector and the associated Azure Function"", ""description"": ""\n>**IMPORTANT:** Before deploying the Agari Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Agari API credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Choose a deployement option"", ""description"": """"}, {""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-agari-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **Agari Client ID**, **Agari Client Secret**, select `True` or `False` for the products you subscribe to, and if you wish to share IoCs with Sentinel, select `True` For **Enable Security Graph Sharing**, and enter the required IDs from the Azure Application.\n> - The Function App will request data from the Agari APIs every 5 minutes, corresponding to the Funciton App Timer.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. **NOTE:** Due to the use of Environment Variables to store log access times, the App requires 1 additonal manual step. In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": ""Option 2: Manual Deployment of Azure Functions"", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. 
In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Click on **Code + Test** on the left pane. \n3. Copy the [Function App Code](https://aka.ms/sentinel-agari-functionapp) and paste into the Function App `run.ps1` editor.\n3. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following eight to twelve (8-12) application settings individually, with their respective string values (case-sensitive): \n\t\tclientID\n\t\tclientSecret\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\tenableBrandProtectionAPI\n\t\tenablePhishingResponseAPI\n\t\tenablePhishingDefenseAPI\n\t\tresGroup\n\t\tfunctionName\n\t\tsubId\n\t\tenableSecurityGraphSharing\n\t\t<--- Required if enableSecurityGraphSharing is set to true --->\n\t\tGraphTenantId\n\t\tGraphClientId\n\t\tGraphClientSecret\n\t\tlogAnalyticsUri (optional)\n> - Enter your Agari ClientID and Secret in 'clientId' and 'clientSecret'\n> - Enter 'true' or 'false' for 'enablePhishingDefense', 'enableBrandProtection', 'enablePhishingResponse' as per your product subscriptions.\n> - Enter your Resource Group name in resGroup, the name of the Function (from previous step) in functionName and your Subscription ID in subId.\n> - Enter 'true' or 'false' for 'enableSecurtyGraphAPI'. If you are enabling the Security Graph, the 'GraphTenantId','GraphClientId', and 'GraphClientSecret' is required.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n""}, {""title"": """", ""description"": ""**4. Set Permissions for the App**\n\n1. 
In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, set the status to On. \n\n2. Next, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": """", ""description"": ""**5. Complete Setup.**\n\n1. Once all application settings have been entered, click **Save**. Note that it will take some time to have the required dependencies download, so you may see some inital failure messages.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Agari Phishing Defense, Phishing Response or Brand Protection API Client ID and Secret"", ""description"": ""Ensure you have your Client ID and Secret keys. 
Instructions can be found on the [Agari Developers Site](https://developers.agari.com/agari-platform/docs/quick-start).""}, {""name"": ""(Optional) Microsoft Security Graph API"", ""description"": ""The Agari Function App has the ability to share threat intelleigence with Sentinel via the Security Graph API. To use this feature, you will need to enable the [Sentinel Threat Intelligence Platforms connector](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence) as well as register an application in Azure Active Directory. ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari/Data%20Connectors/Agari_API_FunctionApp.json","true" +"agari_bpalerts_log_CL","Agari","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari","agari","Agari_MSS","2022-05-02","","","Agari","Partner","https://support.agari.com/hc/en-us/articles/360000645632-How-to-access-Agari-Support","","domains","Agari","Agari","Agari Phishing Defense and Brand Protection","This connector uses a Agari REST API connection to push data into Azure Sentinel Log Analytics.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Agari APIs to pull its logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""STEP 1 - Get your Agari API credentials"", ""description"": ""\n1. Log into any Agari product (Client ID and Secret are the same for all applications) \n2. 
Click on your username in the upper right and select **Settings**\n3. Click on the **Generate API Secret** link to generate an API client_id and client_secret (the link will read **Regenerate API Secret** if you have already generated an API client ID/secret previously)\n4. Copy both the client_id and client_secret that are generated""}, {""title"": ""STEP 2 - (Optional) Enable the Security Graph API"", ""description"": ""Follow the instrcutions found on article [Connect Azure Sentinel to your threat intelligence platform](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence#connect-azure-sentinel-to-your-threat-intelligence-platform). Once the application is created you will need to record the Tenant ID, Client ID and Client Secret.""}, {""title"": ""STEP 3 - Deploy the connector and the associated Azure Function"", ""description"": ""\n>**IMPORTANT:** Before deploying the Agari Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Agari API credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Choose a deployement option"", ""description"": """"}, {""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-agari-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **Agari Client ID**, **Agari Client Secret**, select `True` or `False` for the products you subscribe to, and if you wish to share IoCs with Sentinel, select `True` For **Enable Security Graph Sharing**, and enter the required IDs from the Azure Application.\n> - The Function App will request data from the Agari APIs every 5 minutes, corresponding to the Funciton App Timer.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. **NOTE:** Due to the use of Environment Variables to store log access times, the App requires 1 additonal manual step. In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": ""Option 2: Manual Deployment of Azure Functions"", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. 
In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Click on **Code + Test** on the left pane. \n3. Copy the [Function App Code](https://aka.ms/sentinel-agari-functionapp) and paste into the Function App `run.ps1` editor.\n3. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following eight to twelve (8-12) application settings individually, with their respective string values (case-sensitive): \n\t\tclientID\n\t\tclientSecret\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\tenableBrandProtectionAPI\n\t\tenablePhishingResponseAPI\n\t\tenablePhishingDefenseAPI\n\t\tresGroup\n\t\tfunctionName\n\t\tsubId\n\t\tenableSecurityGraphSharing\n\t\t<--- Required if enableSecurityGraphSharing is set to true --->\n\t\tGraphTenantId\n\t\tGraphClientId\n\t\tGraphClientSecret\n\t\tlogAnalyticsUri (optional)\n> - Enter your Agari ClientID and Secret in 'clientId' and 'clientSecret'\n> - Enter 'true' or 'false' for 'enablePhishingDefense', 'enableBrandProtection', 'enablePhishingResponse' as per your product subscriptions.\n> - Enter your Resource Group name in resGroup, the name of the Function (from previous step) in functionName and your Subscription ID in subId.\n> - Enter 'true' or 'false' for 'enableSecurtyGraphAPI'. If you are enabling the Security Graph, the 'GraphTenantId','GraphClientId', and 'GraphClientSecret' is required.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n""}, {""title"": """", ""description"": ""**4. Set Permissions for the App**\n\n1. 
In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, set the status to On. \n\n2. Next, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": """", ""description"": ""**5. Complete Setup.**\n\n1. Once all application settings have been entered, click **Save**. Note that it will take some time to have the required dependencies download, so you may see some inital failure messages.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Agari Phishing Defense, Phishing Response or Brand Protection API Client ID and Secret"", ""description"": ""Ensure you have your Client ID and Secret keys. 
Instructions can be found on the [Agari Developers Site](https://developers.agari.com/agari-platform/docs/quick-start).""}, {""name"": ""(Optional) Microsoft Security Graph API"", ""description"": ""The Agari Function App has the ability to share threat intelleigence with Sentinel via the Security Graph API. To use this feature, you will need to enable the [Sentinel Threat Intelligence Platforms connector](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence) as well as register an application in Azure Active Directory. ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari/Data%20Connectors/Agari_API_FunctionApp.json","true" +"InfoSecAnalytics_CL","AgileSec Analytics Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AgileSec%20Analytics%20Connector","infosecglobal1632846037582","agilesec-analytics-connector","","","","InfoSecGlobal","Partner","https://www.infosecglobal.com/","","domains","InfoSecDataConnector","InfoSecGlobal","InfoSecGlobal Data Connector","Use this data connector to integrate with InfoSec Crypto Analytics and get data sent directly to Microsoft Sentinel.","[{""title"": ""InfoSecGlobal Crypto Analytics Data Connector"", ""description"": ""1. Data is sent to Microsoft Sentinel through Logstash\n 2. Required Logstash configuration is included with Crypto Analytics installation\n 3. 
Documentation provided with the Crypto Analytics installation explains how to enable sending data to Microsoft Sentinel\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AgileSec%20Analytics%20Connector/Data%20Connectors/Connector_Analytics_InfoSec.json","true" +"CommonSecurityLog","Akamai Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events","azuresentinel","azure-sentinel-solution-akamai","2022-03-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AkamaiSecurityEvents","Akamai","[Deprecated] Akamai Security Events via Legacy Agent","Akamai Solution for Microsoft Sentinel provides the capability to ingest [Akamai Security Events](https://www.akamai.com/us/en/products/security/) into Microsoft Sentinel. 
Refer to [Akamai SIEM Integration documentation](https://developer.akamai.com/tools/integrations/siem) for more information.","[{""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Akamai Security Events and load the function code or click [here](https://aka.ms/sentinel-akamaisecurityevents-parser), on the second line of the query, enter the hostname(s) of your Akamai Security Events device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://developer.akamai.com/tools/integrations/siem) to configure Akamai CEF connector to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events/Data%20Connectors/Connector_CEF_Akamai.json","true" +"CommonSecurityLog","Akamai Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events","azuresentinel","azure-sentinel-solution-akamai","2022-03-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AkamaiSecurityEventsAma","Akamai","[Deprecated] Akamai Security Events via AMA","Akamai Solution for Microsoft Sentinel provides the capability to ingest [Akamai Security Events](https://www.akamai.com/us/en/products/security/) into Microsoft Sentinel. Refer to [Akamai SIEM Integration documentation](https://developer.akamai.com/tools/integrations/siem) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Akamai Security Events and load the function code or click [here](https://aka.ms/sentinel-akamaisecurityevents-parser), on the second line of the query, enter the hostname(s) of your Akamai Security Events device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://developer.akamai.com/tools/integrations/siem) to configure Akamai CEF connector to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address."", ""instructions"": []}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events/Data%20Connectors/template_AkamaiSecurityEventsAMA.json","true" +"","Alibaba Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alibaba%20Cloud","azuresentinel","azure-sentinel-solution-alibabacloud","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"AliCloudActionTrailLogs_CL","Alibaba Cloud ActionTrail","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alibaba%20Cloud%20ActionTrail","azuresentinel","azure-sentinel-solution-alibabacloud-actiontrail","2025-07-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AliCloudActionTrailCCPDefinition","Microsoft","Alibaba Cloud ActionTrail (via Codeless Connector Framework)","The [Alibaba Cloud ActionTrail](https://www.alibabacloud.com/product/actiontrail) data connector provides the capability to retrieve actiontrail events stored into [Alibaba Cloud Simple Log Service](https://www.alibabacloud.com/product/log-service) and store them into Microsoft Sentinel through the [SLS REST API](https://www.alibabacloud.com/help/sls/developer-reference/api-sls-2020-12-30-getlogs). 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": ""Configure access to AliCloud SLS API"", ""description"": ""Before using the API, you need to prepare your identity account and access key pair to effectively access the API.\n1. We recommend that you use a Resource Access Management (RAM) user to call API operations. For more information, see [create a RAM user and authorize the RAM user to access Simple Log Service](https://www.alibabacloud.com/help/sls/create-a-ram-user-and-authorize-the-ram-user-to-access-log-service).\n2. Obtain the access key pair for the RAM user. For details see [get Access Key pair](https://www.alibabacloud.com/help/ram/user-guide/create-an-accesskey-pair).\n\nNote the access key pair details for the next step.""}, {""title"": ""Add ActionTrail Logstore"", ""description"": ""To enable the Alibaba Cloud ActionTrail connector for Microsoft Sentinel, click upon add ActionTrail Logstore, fill the form with the Alibaba Cloud environment configuration and click Connect."", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""AliCloud SLS Logstore Endpoint URL"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Logstore"", ""title"": ""Add ActionTrail Logstore"", ""subtitle"": ""Add SLS Logstore linked to Alibaba Cloud ActionTrail"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Alibaba Cloud SLS Public Endpoint"", ""placeholder"": "".log.aliyuncs.com"", ""type"": ""string"", ""name"": ""endpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Project"", ""placeholder"": """", ""type"": ""string"", ""name"": 
""project""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Logstore"", ""placeholder"": """", ""type"": ""string"", ""name"": ""logstore""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Access Key ID"", ""placeholder"": ""Access Key ID"", ""type"": ""password"", ""name"": ""accessKeyId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Access Key Secret"", ""placeholder"": ""Access Key Secret"", ""type"": ""password"", ""name"": ""accessKeySecret""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""SLS REST API Credentials/permissions"", ""description"": ""**AliCloudAccessKeyId** and **AliCloudAccessKeySecret** are required for making API calls. RAM policy statement with action of at least `log:GetLogStoreLogs` over resource `acs:log:{#regionId}:{#accountId}:project/{#ProjectName}/logstore/{#LogstoreName}` is needed to grant a RAM user the permissions to call this operation.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alibaba%20Cloud%20ActionTrail/Data%20Connectors/AliCloudCloudTrailConnector_CCP/AliCloudActionTrail_DataConnectorDefinition.json","true" +"AlsidForADLog_CL","Alsid For AD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alsid%20For%20AD","alsid1603447574634","Alsid_For_AD_MSS","2022-05-06","","","Alsid","Partner","https://www.alsid.com/contact-us/","","domains","AlsidForAD","Alsid","Alsid for Active Directory","Alsid for Active Directory connector allows to export Alsid Indicators of Exposures, trailflow and Indicators of Attacks logs to Azure Sentinel in real time.
It provides a data parser to manipulate the logs more easily. The different workbooks ease your Active Directory monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks.","[{""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-alsidforad-parser) to create the Kusto Functions alias, **afad_parser**"", ""instructions"": []}, {""title"": ""1. Configure the Syslog server"", ""description"": ""You will first need a **linux Syslog** server that Alsid for AD will send logs to. Typically you can run **rsyslog** on **Ubuntu**.\n You can then configure this server as you wish, but it is recommended to be able to output AFAD logs in a separate file.\nAlternatively you can use [this Quickstart template](https://azure.microsoft.com/resources/templates/alsid-syslog-proxy/) which will deploy the Syslog server and the Microsoft agent for you. If you do use this template, you can skip step 3.""}, {""title"": ""2. Configure Alsid to send logs to your Syslog server"", ""description"": ""On your **Alsid for AD** portal, go to *System*, *Configuration* and then *Syslog*.\nFrom there you can create a new Syslog alert toward your Syslog server.\n\nOnce this is done, check that the logs are correctly gathered on your server in a separate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in AFAD).\nIf you used the Quickstart template, the Syslog server will by default listen on port 514 in UDP and 1514 in TCP, without TLS.""}, {""title"": ""3. 
Install and onboard the Microsoft agent for Linux"", ""description"": ""You can skip this step if you used the Quickstart template in step 1"", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""4. Configure the logs to be collected by the agents"", ""description"": ""Configure the agent to collect the logs.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Custom Logs**.\n2. Select **Apply below configuration to my machines** and click **Add**.\n3. Upload a sample AFAD Syslog file from the **Linux** machine running the **Syslog** server and click **Next**, for your convenience, you can find such a file [here](https://github.com/Azure/azure-quickstart-templates/blob/master/alsid-syslog-proxy/logs/AlsidForAD.log).\n4. Set the record delimiter to **New Line** if not already the case and click **Next**.\n5. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**. If you used the Quickstart template in step 1, the default location of the file is `/var/log/AlsidForAD.log`.\n6. 
Set the **Name** to *AlsidForADLog_CL* then click **Done** (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *AlsidForADLog_CL_CL*).\n\nAll of these steps are showcased [here](https://www.youtube.com/watch?v=JwV1uZSyXM4&feature=youtu.be) as an example"", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""> You should now be able to receive logs in the *AlsidForADLog_CL* table, logs data can be parse using the **afad_parser()** function, used by all query samples, workbooks and analytic templates.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alsid%20For%20AD/Data%20Connectors/AlsidForAD.json","true" +"AWSCloudTrail","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AWS","Amazon","Amazon Web Services","Follow these instructions to connect to AWS and stream your CloudTrail logs into Microsoft Sentinel. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect AWS cloud trail with Microsoft Sentinel\u200b"", ""description"": ""The connection necessitates giving Microsoft permissions to access your AWS account. To enable this, follow the instructions under [Connect AWS to Microsoft Sentinel](https://aka.ms/AWSConnector) and use these parameters when prompted:\n\n> Data from all regions will be sent to and stored in the workspace's region.\n\n> It takes about 5 minutes until the connection streams data to your workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""MicrosoftAwsAccount""], ""label"": ""Microsoft account ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""External ID (Workspace ID)""}, ""type"": ""CopyableLabel""}, {""parameters"": {""text"": ""The integration is applicable for AWS public cloud accounts."", ""visible"": false, ""inline"": true}, ""type"": ""InfoMessage""}, {""parameters"": {}, ""type"": ""AwsCloudTrail""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AWS.json","true" +"AWSCloudTrail","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. 
The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. Set up your AWS environment"", ""description"": ""There are two options for setting up your AWS environment to send logs from an S3 bucket to your Log Analytics Workspace:"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Setup with PowerShell script (recommended)"", ""instructions"": [{""parameters"": {""govScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScriptsGov.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json."", ""prodScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScripts.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. 
Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json.""}, ""type"": ""MarkdownControlEnvBased""}, {""parameters"": {""label"": ""Run script to set up the environment"", ""value"": ""./ConfigAwsConnector.ps1""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""External ID (Workspace ID)""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Manual Setup"", ""description"": ""Follow the instruction in the following link to set up the environment: [Connect AWS S3 to Microsoft Sentinel](https://aka.ms/AWSS3Connector)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Add connection"", ""instructions"": [{""parameters"": {}, ""type"": ""AwsS3""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""you must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies, and the AWS services whose logs you want to collect.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" +"AWSCloudWatch","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. Set up your AWS environment"", ""description"": ""There are two options for setting up your AWS environment to send logs from an S3 bucket to your Log Analytics Workspace:"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Setup with PowerShell script (recommended)"", ""instructions"": [{""parameters"": {""govScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScriptsGov.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json."", ""prodScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScripts.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. 
Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json.""}, ""type"": ""MarkdownControlEnvBased""}, {""parameters"": {""label"": ""Run script to set up the environment"", ""value"": ""./ConfigAwsConnector.ps1""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""External ID (Workspace ID)""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Manual Setup"", ""description"": ""Follow the instruction in the following link to set up the environment: [Connect AWS S3 to Microsoft Sentinel](https://aka.ms/AWSS3Connector)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Add connection"", ""instructions"": [{""parameters"": {}, ""type"": ""AwsS3""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""you must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies, and the AWS services whose logs you want to collect.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" +"AWSGuardDuty","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. Set up your AWS environment"", ""description"": ""There are two options for setting up your AWS environment to send logs from an S3 bucket to your Log Analytics Workspace:"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Setup with PowerShell script (recommended)"", ""instructions"": [{""parameters"": {""govScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScriptsGov.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json."", ""prodScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScripts.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. 
Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json.""}, ""type"": ""MarkdownControlEnvBased""}, {""parameters"": {""label"": ""Run script to set up the environment"", ""value"": ""./ConfigAwsConnector.ps1""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""External ID (Workspace ID)""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Manual Setup"", ""description"": ""Follow the instruction in the following link to set up the environment: [Connect AWS S3 to Microsoft Sentinel](https://aka.ms/AWSS3Connector)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Add connection"", ""instructions"": [{""parameters"": {}, ""type"": ""AwsS3""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""you must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies, and the AWS services whose logs you want to collect.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" +"AWSVPCFlow","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. Set up your AWS environment"", ""description"": ""There are two options for setting up your AWS environment to send logs from an S3 bucket to your Log Analytics Workspace:"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Setup with PowerShell script (recommended)"", ""instructions"": [{""parameters"": {""govScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScriptsGov.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json."", ""prodScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScripts.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. 
Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json.""}, ""type"": ""MarkdownControlEnvBased""}, {""parameters"": {""label"": ""Run script to set up the environment"", ""value"": ""./ConfigAwsConnector.ps1""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""External ID (Workspace ID)""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Manual Setup"", ""description"": ""Follow the instruction in the following link to set up the environment: [Connect AWS S3 to Microsoft Sentinel](https://aka.ms/AWSS3Connector)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Add connection"", ""instructions"": [{""parameters"": {}, ""type"": ""AwsS3""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""you must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies, and the AWS services whose logs you want to collect.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" +"AWSWAF","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3WafCcpDefinition","Microsoft","Amazon Web Services S3 WAF","This connector allows you to ingest AWS WAF logs, collected in AWS S3 buckets, to Microsoft Sentinel. AWS WAF logs are detailed records of traffic that web access control lists (ACLs) analyze, which are essential for maintaining the security and performance of web applications. These logs contain information such as the time AWS WAF received the request, the specifics of the request, and the action taken by the rule that the request matched.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. 
Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS WAF resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AwsWAF""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, 
""write"": true, ""delete"": true, ""action"": false}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": false, ""write"": false, ""delete"": false, ""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/AWS_WAF_CCP/AwsS3_WAF_DataConnectorDefinition.json","true" +"AWSNetworkFirewallAlert","Amazon Web Services NetworkFirewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall","azuresentinel","azure-sentinel-solution-aws-networkfirewall","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsNetworkFirewallCcpDefinition","Microsoft","Amazon Web Services NetworkFirewall (via Codeless Connector Framework)","This data connector allows you to ingest AWS Network Firewall logs into Microsoft Sentinel for advanced threat detection and security monitoring. 
By leveraging Amazon S3 and Amazon SQS, the connector forwards network traffic logs, intrusion detection alerts, and firewall events to Microsoft Sentinel, enabling real-time analysis and correlation with other security data","[{""title"": ""Ingesting AWS NetworkFirewall logs in Microsoft Sentinel"", ""description"": ""### List of Resources Required:\n\n* Open ID Connect (OIDC) web identity provider\n* IAM Role\n* Amazon S3 Bucket\n* Amazon SQS\n* AWSNetworkFirewall configuration\n* Follow this instructions for [AWS NetworkFirewall Data connector](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/readme.md) configuration \n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018**Specify template**\u2019 option, then \u2018**Upload a template file**\u2019 by clicking on \u2018**Choose file**\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018**Choose file**\u2019 and select the downloaded template. \n 3. Click '**Next**' and '**Create stack**'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWSNetworkFirewall resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSNetworkFirewall""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-AWSNetworkFirewall-AlertLog"", ""text"": ""Alert Log""}, {""key"": ""Custom-AWSNetworkFirewall-FlowLog"", ""text"": ""Flow Log""}, {""key"": ""Custom-AWSNetworkFirewall-TlsLog"", ""text"": ""Tls Log""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": 
false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/AWSNetworkFirewallLogs_CCP/AWSNetworkFirewallLog_ConnectorDefinition.json","true" +"AWSNetworkFirewallFlow","Amazon Web Services NetworkFirewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall","azuresentinel","azure-sentinel-solution-aws-networkfirewall","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsNetworkFirewallCcpDefinition","Microsoft","Amazon Web Services NetworkFirewall (via Codeless Connector Framework)","This data connector allows you to ingest AWS Network Firewall logs into Microsoft Sentinel for advanced threat detection and security monitoring. By leveraging Amazon S3 and Amazon SQS, the connector forwards network traffic logs, intrusion detection alerts, and firewall events to Microsoft Sentinel, enabling real-time analysis and correlation with other security data","[{""title"": ""Ingesting AWS NetworkFirewall logs in Microsoft Sentinel"", ""description"": ""### List of Resources Required:\n\n* Open ID Connect (OIDC) web identity provider\n* IAM Role\n* Amazon S3 Bucket\n* Amazon SQS\n* AWSNetworkFirewall configuration\n* Follow this instructions for [AWS NetworkFirewall Data connector](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/readme.md) configuration \n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. 
Choose the \u2018**Specify template**\u2019 option, then \u2018**Upload a template file**\u2019 by clicking on \u2018**Choose file**\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018**Choose file**\u2019 and select the downloaded template. \n 3. Click '**Next**' and '**Create stack**'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWSNetworkFirewall resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSNetworkFirewall""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, 
""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-AWSNetworkFirewall-AlertLog"", ""text"": ""Alert Log""}, {""key"": ""Custom-AWSNetworkFirewall-FlowLog"", ""text"": ""Flow Log""}, {""key"": ""Custom-AWSNetworkFirewall-TlsLog"", ""text"": ""Tls Log""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/AWSNetworkFirewallLogs_CCP/AWSNetworkFirewallLog_ConnectorDefinition.json","true" +"AWSNetworkFirewallTls","Amazon Web Services NetworkFirewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall","azuresentinel","azure-sentinel-solution-aws-networkfirewall","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsNetworkFirewallCcpDefinition","Microsoft","Amazon Web Services NetworkFirewall (via Codeless Connector Framework)","This data connector allows you to ingest AWS Network Firewall logs into Microsoft Sentinel for advanced threat detection and security monitoring. 
By leveraging Amazon S3 and Amazon SQS, the connector forwards network traffic logs, intrusion detection alerts, and firewall events to Microsoft Sentinel, enabling real-time analysis and correlation with other security data","[{""title"": ""Ingesting AWS NetworkFirewall logs in Microsoft Sentinel"", ""description"": ""### List of Resources Required:\n\n* Open ID Connect (OIDC) web identity provider\n* IAM Role\n* Amazon S3 Bucket\n* Amazon SQS\n* AWSNetworkFirewall configuration\n* Follow this instructions for [AWS NetworkFirewall Data connector](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/readme.md) configuration \n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018**Specify template**\u2019 option, then \u2018**Upload a template file**\u2019 by clicking on \u2018**Choose file**\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018**Choose file**\u2019 and select the downloaded template. \n 3. Click '**Next**' and '**Create stack**'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWSNetworkFirewall resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSNetworkFirewall""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-AWSNetworkFirewall-AlertLog"", ""text"": ""Alert Log""}, {""key"": ""Custom-AWSNetworkFirewall-FlowLog"", ""text"": ""Flow Log""}, {""key"": ""Custom-AWSNetworkFirewall-TlsLog"", ""text"": ""Tls Log""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": 
false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/AWSNetworkFirewallLogs_CCP/AWSNetworkFirewallLog_ConnectorDefinition.json","true" +"AWSRoute53Resolver","Amazon Web Services Route 53","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20Route%2053","azuresentinel","azure-sentinel-solution-amazonwebservicesroute53","2025-03-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AWSRoute53ResolverCCPDefinition","Microsoft","Amazon Web Services S3 DNS Route53 (via Codeless Connector Framework)","This connector enables ingestion of AWS Route 53 DNS logs into Microsoft Sentinel for enhanced visibility and threat detection. It supports DNS Resolver query logs ingested directly from AWS S3 buckets, while Public DNS query logs and Route 53 audit logs can be ingested using Microsoft Sentinel's AWS CloudWatch and CloudTrail connectors. Comprehensive instructions are provided to guide you through the setup of each log type. Leverage this connector to monitor DNS activity, detect potential threats, and improve your security posture in cloud environments.","[{""title"": ""AWS Route53"", ""description"": ""This connector enables the ingestion of AWS Route 53 DNS logs into Microsoft Sentinel, providing enhanced visibility into DNS activity and strengthening threat detection capabilities. It supports direct ingestion of DNS Resolver query logs from AWS S3 buckets, while Public DNS query logs and Route 53 audit logs can be ingested via Microsoft Sentinel\u2019s AWS CloudWatch and CloudTrail connectors. Detailed setup instructions are provided for each log type. Use this connector to monitor DNS traffic, identify potential threats, and enhance your cloud security posture.\n\nYou can ingest the following type of logs from AWS Route 53 to Microsoft Sentinel:\n1. Route 53 Resolver query logs\n2. 
Route 53 Public Hosted zones query logs (via Microsoft Sentinel CloudWatch connector)\n3. Route 53 audit logs (via Microsoft Sentinel CloudTrail connector)""}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Ingesting Route53 Resolver query logs in Microsoft Sentinel"", ""description"": ""### List of Resources Required:\n\n* Open ID Connect (OIDC) web identity provider\n* IAM Role\n* Amazon S3 Bucket\n* Amazon SQS\n* Route 53 Resolver query logging configuration\n* VPC to associate with Route53 Resolver query log config\n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018**Specify template**\u2019 option, then \u2018**Upload a template file**\u2019 by clicking on \u2018**Choose file**\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018**Choose file**\u2019 and select the downloaded template. \n 3. Click '**Next**' and '**Create stack**'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Route53 resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSRoute53Resolver""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""### 2. 
Connect new collectors \n To enable Amazon Web Services S3 DNS Route53 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS Security Hub connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""securestring"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""securestring"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}]}]}}]}, {""title"": ""Ingesting Route 53 Public Hosted zones query logs (via Microsoft Sentinel CloudWatch connector)"", ""description"": ""Public Hosted zone query logs are exported to CloudWatch service in AWS. We can use 'Amazon Web Services S3' connector to ingest CloudWatch logs from AWS to Microsoft Sentinel."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1: Configure logging for Public DNS queries"", ""description"": ""1. Sign in to the AWS Management Console and open the Route 53 console at [AWS Route 53](https://console.aws.amazon.com/route53/).\n2. Navigate to Route 53 > Hosted zones.\n3. Choose the Public hosted zone that you want to configure query logging for.\n4. In the Hosted zone details pane, click \""Configure query logging\"".\n5. Choose an existing log group or create a new log group.\n6. 
Choose Create.""}, {""title"": ""Step 2: Configure Amazon Web Services S3 data connector for AWS CloudWatch"", ""description"": ""AWS CloudWatch logs can be exported to an S3 bucket using lambda function. To ingest Public DNS queries from `AWS CloudWatch` to `S3` bucket and then to Microsoft Sentinel, follow the instructions provided in the [Amazon Web Services S3 connector](https://learn.microsoft.com/en-us/azure/sentinel/connect-aws?tabs=s3).""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Ingesting Route 53 audit logs (via Microsoft Sentinel CloudTrail connector)"", ""description"": ""Route 53 audit logs i.e. the logs related to actions taken by user, role or AWS service in Route 53 can be exported to an S3 bucket via AWS CloudTrail service. We can use 'Amazon Web Services S3' connector to ingest CloudTrail logs from AWS to Microsoft Sentinel."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1: Configure logging for AWS Route 53 Audit logs"", ""description"": ""1. Sign in to the AWS Management Console and open the CloudTrail console at [AWS CloudTrail](https://console.aws.amazon.com/cloudtrail)\n2. If you do not have an existing trail, click on 'Create trail'\n3. Enter a name for your trail in the Trail name field.\n4. Select Create new S3 bucket (you may also choose to use an existing S3 bucket).\n5. Leave the other settings as default, and click Next.\n6. Select Event type, make sure Management events is selected.\n7. Select API activity, 'Read' and 'Write'\n8. Click Next.\n9. 
Review the settings and click 'Create trail'.""}, {""title"": ""Step 2: Configure Amazon Web Services S3 data connector for AWS CloudTrail"", ""description"": ""To ingest audit and management logs from `AWS CloudTrail` to Microsoft Sentinel, follow the instructions provided in the [Amazon Web Services S3 connector](https://learn.microsoft.com/en-us/azure/sentinel/connect-aws?tabs=s3)""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": false, ""write"": false, ""delete"": false, ""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20Route%2053/Data%20Connectors/AWSRoute53Resolver_CCP/AWSRoute53Resolver_DataConnectorDefinition.json","true" +"Anvilogic_Alerts_CL","Anvilogic","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Anvilogic","anvilogic1725900018831","azure-sentinel-solution-anvilogic","2025-06-20","","","Anvilogic","Partner","https://www.anvilogic.com/","","domains","AnvilogicCCFDefinition","Anvilogic","Anvilogic","The Anvilogic data connector allows you to pull events of interest generated in the Anvilogic ADX cluster into your Microsoft Sentinel","[{""description"": ""Complete the form to ingest Anvilogic Alerts into 
your Microsoft Sentinel"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Token Endpoint"", ""placeholder"": ""https://login[.]microsoftonline[.]com//oauth2/v2.0/token"", ""type"": ""text"", ""name"": ""tokenEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Anvilogic ADX Scope"", ""placeholder"": ""/.default"", ""type"": ""text"", ""name"": ""authorizationEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Anvilogic ADX Request URI"", ""placeholder"": ""/v2/rest/query"", ""type"": ""text"", ""name"": ""apiEndpoint""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Anvilogic to start collecting events of interest in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Anvilogic Application Registration Client ID and Client Secret"", ""description"": ""To access the Anvilogic ADX we require the client id and client secret from the Anvilogic app registration""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Anvilogic/Data%20Connectors/AnviLogic_CCF/Anvilogic_DataConnectorDefinition.json","true" +"","Apache Log4j Vulnerability Detection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Apache%20Log4j%20Vulnerability%20Detection","azuresentinel","azure-sentinel-solution-apachelog4jvulnerability","2021-12-15","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" 
+"ApacheHTTPServer_CL","ApacheHTTPServer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ApacheHTTPServer","azuresentinel","azure-sentinel-solution-apachehttpserver","2021-10-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ApacheHTTPServer","Apache","[Deprecated] Apache HTTP Server","The Apache HTTP Server data connector provides the capability to ingest [Apache HTTP Server](http://httpd.apache.org/) events into Microsoft Sentinel. Refer to [Apache Logs documentation](https://httpd.apache.org/docs/2.4/logs.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ApacheHTTPServer and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ApacheHTTPServer/Parsers/ApacheHTTPServer.txt). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Apache HTTP Server where the logs are generated.\n\n> Logs from Apache HTTP Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. 
Select the link above to open your workspace advanced settings \n2. From the left pane, select **Data**, select **Custom Logs** and click **Add+**\n3. Click **Browse** to upload a sample of a Apache HTTP Server log file (e.g. access.log or error.log). Then, click **Next >**\n4. Select **New line** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to Apache HTTP logs based on your configuration. Example: \n - **Windows** directory: `C:\\Server\\bin\\Apache24\\logs\\*.log`\n - **Linux** Directory: `/var/log/httpd/*.log` \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **ApacheHTTPServer_CL** as the custom log Name and click **Done**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ApacheHTTPServer/Data%20Connectors/Connector_ApacheHTTPServer_agent.json","true" +"CommonSecurityLog","AristaAwakeSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AristaAwakeSecurity","arista-networks","awake-security","2021-10-18","","","Arista - Awake Security","Partner","https://awakesecurity.com/","","domains","AristaAwakeSecurity","Arista Networks","[Deprecated] Awake Security via Legacy Agent","The Awake Security CEF connector allows users to send detection model matches from the Awake Security Platform to Microsoft Sentinel. Remediate threats quickly with the power of network detection and response and speed up investigations with deep visibility especially into unmanaged entities including users, devices and applications on your network. The connector also enables the creation of network security-focused custom alerts, incidents, workbooks and notebooks that align with your existing security operations workflows. ","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. 
Forward Awake Adversarial Model match results to a CEF collector."", ""description"": ""Perform the following steps to forward Awake Adversarial Model match results to a CEF collector listening on TCP port **514** at IP **192.168.0.1**:\n- Navigate to the Detection Management Skills page in the Awake UI.\n- Click + Add New Skill.\n- Set the Expression field to,\n>integrations.cef.tcp { destination: \""192.168.0.1\"", port: 514, secure: false, severity: Warning }\n- Set the Title field to a descriptive name like,\n>Forward Awake Adversarial Model match result to Microsoft Sentinel.\n- Set the Reference Identifier to something easily discoverable like,\n>integrations.cef.sentinel-forwarder\n- Click Save.\n\nNote: Within a few minutes of saving the definition and other fields the system will begin sending new model match results to the CEF events collector as they are detected.\n\nFor more information, refer to the **Adding a Security Information and Event Management Push Integration** page from the Help Documentation in the Awake UI.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AristaAwakeSecurity/Data%20Connectors/Connector_AristaAwakeSecurity_CEF.json","true" +"Armis_Activities_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisActivities","Armis","Armis Activities","The [Armis](https://www.armis.com/) Activities connector gives the capability to ingest Armis device Activities into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/doc` for more information. The connector provides the ability to get device activity information from the Armis platform. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. Armis detects what all devices are doing in your environment and classifies those activities to get a complete picture of device behavior. 
These activities are analyzed for an understanding of normal and abnormal device behavior and used to assess device and network risk.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArmisActivities and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Parsers/ArmisActivities.yaml). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armis API**\n\n Follow these instructions to create an Armis API secret key.\n 1. Log into your Armis instance\n 2. Navigate to Settings -> API Management\n 3. If the secret key has not already been created, press the Create button to create the secret key\n 4. To access the secret key, press the Show button\n 5. 
The secret key can now be copied and used during the Armis Activities connector configuration""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armis Activities data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Armis API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armis connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisActivitiesAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisActivitiesAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Activity Table Name \n\t\tArmis Schedule \n\t\tAvoid Duplicates (Default: false) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armis Activity data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisActivitiesAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Activity Table Name \n\t\tArmis Schedule \n\t\tAvoid Duplicates (Default: false) \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Armis Secret Key** is required. 
See the documentation to learn more about API on the `https://.armis.com/api/v1/doc`""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisActivities/ArmisActivities_API_FunctionApp.json","true" +"Armis_Alerts_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisAlerts","Armis","Armis Alerts","The [Armis](https://www.armis.com/) Alerts connector gives the capability to ingest Armis Alerts into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get alert information from the Armis platform and to identify and prioritize threats in your environment. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. ","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
[Follow these steps](https://aka.ms/sentinel-ArmisAlertsAPI-parser) to create the Kusto functions alias, **ArmisAlerts**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armis API**\n\n Follow these instructions to create an Armis API secret key.\n 1. Log into your Armis instance\n 2. Navigate to Settings -> API Management\n 3. If the secret key has not already been created, press the Create button to create the secret key\n 4. To access the secret key, press the Show button\n 5. The secret key can now be copied and used during the Armis Alerts connector configuration""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armis Alert data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Armis API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armis connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisAlertsAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisAlertsAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Schedule \n\t\tAvoid Duplicates (Default: true) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armis Alert data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisAlertsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX).\n\n\te. 
**Select a runtime:** Choose Python 3.11\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Schedule \n\t\tAvoid Duplicates (Default: true) \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Armis Secret Key** is required. See the documentation to learn more about API on the `https://.armis.com/api/v1/doc`""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisAlerts/ArmisAlerts_API_FunctionApp.json","true" +"Armis_Activities_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisAlertsActivities","Armis","Armis Alerts Activities","The [Armis](https://www.armis.com/) Alerts Activities connector gives the capability to ingest Armis Alerts and Activities into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get alert and activity information from the Armis platform and to identify and prioritize threats in your environment. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. ","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArmisActivities/ArmisAlerts and load the function code. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armis API**\n\n Follow these instructions to create an Armis API secret key.\n 1. Log into your Armis instance\n 2. Navigate to Settings -> API Management\n 3. If the secret key has not already been created, press the Create button to create the secret key\n 4. To access the secret key, press the Show button\n 5. The secret key can now be copied and used during the Armis Alerts Activities connector configuration""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. 
Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Armis Alerts Activities Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Armis Alerts Activities Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Armis Alerts Activities Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. 
Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 5 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults**. Click create.\n 2. Select Subsciption, Resource Group and provide unique name of keyvault.\n\n> **NOTE:** Create a separate key vault for each **API key** within one workspace.""}, {""title"": """", ""description"": ""**STEP 6 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel. Click create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n> **NOTE:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armis Alerts Activities data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Armis API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armis connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Activity Table Name \n\t\tSeverity (Default: Low) \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armis Alerts Activities data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI311-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Activity Table Name \n\t\tSeverity (Default: Low) \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Armis Secret Key** is required. See the documentation to learn more about API on the `https://.armis.com/api/v1/doc`""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisAlertsActivities/ArmisAlertsActivities_API_FunctionApp.json","true" +"Armis_Alerts_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisAlertsActivities","Armis","Armis Alerts Activities","The [Armis](https://www.armis.com/) Alerts Activities connector gives the capability to ingest Armis Alerts and Activities into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get alert and activity information from the Armis platform and to identify and prioritize threats in your environment. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. ","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArmisActivities/ArmisAlerts and load the function code. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armis API**\n\n Follow these instructions to create an Armis API secret key.\n 1. Log into your Armis instance\n 2. Navigate to Settings -> API Management\n 3. If the secret key has not already been created, press the Create button to create the secret key\n 4. To access the secret key, press the Show button\n 5. The secret key can now be copied and used during the Armis Alerts Activities connector configuration""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. 
Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Armis Alerts Activities Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Armis Alerts Activities Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Armis Alerts Activities Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. 
Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 5 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults**. Click create.\n 2. Select Subsciption, Resource Group and provide unique name of keyvault.\n\n> **NOTE:** Create a separate key vault for each **API key** within one workspace.""}, {""title"": """", ""description"": ""**STEP 6 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel. Click create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n> **NOTE:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armis Alerts Activities data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Armis API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armis connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Activity Table Name \n\t\tSeverity (Default: Low) \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armis Alerts Activities data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI311-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Activity Table Name \n\t\tSeverity (Default: Low) \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Armis Secret Key** is required. See the documentation to learn more about API on the `https://.armis.com/api/v1/doc`""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisAlertsActivities/ArmisAlertsActivities_API_FunctionApp.json","true" +"Armis_Devices_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisDevices","Armis","Armis Devices","The [Armis](https://www.armis.com/) Device connector gives the capability to ingest Armis Devices into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get device information from the Armis platform. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. Armis can also integrate with your existing IT & security management tools to identify and classify each and every device, managed or unmanaged in your environment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-ArmisDevice-parser) to create the Kusto functions alias, **ArmisDevice**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armis API**\n\n Follow these instructions to create an Armis API secret key.\n 1. Log into your Armis instance\n 2. Navigate to Settings -> API Management\n 3. If the secret key has not already been created, press the Create button to create the secret key\n 4. To access the secret key, press the Show button\n 5. The secret key can now be copied and used during the Armis Device connector configuration""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Armis Device Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Armis Device Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Armis Device Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. 
Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 5 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults**. Click create.\n 2. Select Subsciption, Resource Group and provide unique name of keyvault.\n\n> **NOTE:** Create a separate key vault for each **API key** within one workspace.""}, {""title"": """", ""description"": ""**STEP 6 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel. Click create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n> **NOTE:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armis Device data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Armis API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armis connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisDevice-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisDevice-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Device Table Name \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armis Device data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisDevice311-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Device Table Name \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Armis Secret Key** is required. 
See the documentation to learn more about API on the `https://.armis.com/api/v1/doc`""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisDevice/ArmisDevice_API_FunctionApp.json","true" +"Armorblox_CL","Armorblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armorblox","armorblox1601081599926","armorblox_sentinel_1","2021-10-18","","","Armorblox","Partner","https://www.armorblox.com/contact/","","domains","Armorblox","Armorblox","Armorblox","The [Armorblox](https://www.armorblox.com/) data connector provides the capability to ingest incidents from your Armorblox instance into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armorblox API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armorblox API**\n\n Follow the instructions to obtain the API token.\n\n1. Log in to the Armorblox portal with your credentials.\n2. In the portal, click **Settings**.\n3. In the **Settings** view, click **API Keys**\n4. Click **Create API Key**.\n5. Enter the required information.\n6. Click **Create**, and copy the API token displayed in the modal.\n7. 
Save API token for using in the data connector.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armorblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armorblox data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-armorblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **ArmorbloxAPIToken**, **ArmorbloxInstanceURL** OR **ArmorbloxInstanceName**, and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armorblox data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-armorblox-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. Armorblox).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tArmorbloxAPIToken\n\t\tArmorbloxInstanceName OR ArmorbloxInstanceURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tLogAnalyticsUri (optional)\n> - Use LogAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Armorblox Instance Details"", ""description"": ""**ArmorbloxInstanceName** OR **ArmorbloxInstanceURL** is required""}, {""name"": ""Armorblox API Credentials"", ""description"": ""**ArmorbloxAPIToken** is required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armorblox/Data%20Connectors/Armorblox_API_FunctionApp.json","true" +"CommonSecurityLog","Aruba ClearPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass","azuresentinel","azure-sentinel-solution-arubaclearpass","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ArubaClearPass","Aruba Networks","[Deprecated] Aruba ClearPass via Legacy Agent","The [Aruba ClearPass](https://www.arubanetworks.com/products/security/network-access-control/secure-access/) connector allows you to easily connect your Aruba ClearPass with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArubaClearPass and load the function code or click [here](https://aka.ms/sentinel-arubaclearpass-parser).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Aruba ClearPass logs to a Syslog agent"", ""description"": ""Configure Aruba ClearPass to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://www.arubanetworks.com/techdocs/ClearPass/6.7/PolicyManager/Content/CPPM_UserGuide/Admin/syslogExportFilters_add_syslog_filter_general.htm) to configure the Aruba ClearPass to forward syslog.\n2. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass/Data%20Connectors/Connector_Syslog_ArubaClearPass.json","true" +"CommonSecurityLog","Aruba ClearPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass","azuresentinel","azure-sentinel-solution-arubaclearpass","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ArubaClearPassAma","Aruba Networks","[Deprecated] Aruba ClearPass via AMA","The [Aruba ClearPass](https://www.arubanetworks.com/products/security/network-access-control/secure-access/) connector allows you to easily connect your Aruba ClearPass with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArubaClearPass and load the function code or click [here](https://aka.ms/sentinel-arubaclearpass-parser).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. 
Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Aruba ClearPass logs to a Syslog agent"", ""description"": ""Configure Aruba ClearPass to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://www.arubanetworks.com/techdocs/ClearPass/6.7/PolicyManager/Content/CPPM_UserGuide/Admin/syslogExportFilters_add_syslog_filter_general.htm) to configure the Aruba ClearPass to forward syslog.\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass/Data%20Connectors/template_ArubaClearPassAMA.json","true" +"AtlassianConfluenceNativePoller_CL","AtlassianConfluenceAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit","azuresentinel","azure-sentinel-solution-atlassianconfluenceaudit","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AtlassianConfluence","Atlassian","Atlassian Confluence","The Atlassian Confluence data connector provides the capability to ingest [Atlassian Confluence audit logs](https://developer.atlassian.com/cloud/confluence/rest/api-group-audit/) into Microsoft Sentinel.","[{""title"": ""Connect Atlassian Confluence"", ""description"": ""Please insert your credentials"", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Domain Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{domain}}"", ""placeHolderValue"": """"}]}, ""type"": ""BasicAuth""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Atlassian Confluence API credentials"", ""description"": ""Confluence Username and Confluence Access Token are required. [See the documentation to learn more about Atlassian Confluence API](https://developer.atlassian.com/cloud/confluence/rest/intro/). Confluence domain must be provided as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit/Data%20Connectors/ConfluenceNativePollerConnector/azuredeploy_Confluence_native_poller_connector.json","true" +"Confluence_Audit_CL","AtlassianConfluenceAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit","azuresentinel","azure-sentinel-solution-atlassianconfluenceaudit","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ConfluenceAuditAPI","Atlassian","[Deprecated] Atlassian Confluence Audit","The [Atlassian Confluence](https://www.atlassian.com/software/confluence) Audit data connector provides the capability to ingest [Confluence Audit Records](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Confluence REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Confluence API**\n\n [Follow the instructions](https://developer.atlassian.com/cloud/confluence/rest/intro/#auth) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Confluence Audit data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-confluenceaudit-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-confluenceaudit-azuredeploy-gov)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **ConfluenceAccessToken**, **ConfluenceUsername**, **ConfluenceHomeSiteName** (short site name part, as example HOMESITENAME from https://community.atlassian.com) and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Confluence Audit data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-confluenceauditapi-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. 
Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ConflAuditXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tConfluenceUsername\n\t\tConfluenceAccessToken\n\t\tConfluenceHomeSiteName\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**ConfluenceAccessToken**, **ConfluenceUsername** is required for REST API. [See the documentation to learn more about API](https://developer.atlassian.com/cloud/confluence/rest/api-group-audit/). 
Check all [requirements and follow the instructions](https://developer.atlassian.com/cloud/confluence/rest/intro/#auth) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit/Data%20Connectors/AtlassianConfluenceAuditDataConnector/ConfluenceAudit_API_FunctionApp.json","true" +"ConfluenceAuditLogs_CL","AtlassianConfluenceAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit","azuresentinel","azure-sentinel-solution-atlassianconfluenceaudit","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ConfluenceAuditCCPDefinition","Microsoft"," Atlassian Confluence Audit (via Codeless Connector Framework)","The [Atlassian Confluence](https://www.atlassian.com/software/confluence) Audit data connector provides the capability to ingest [Confluence Audit Records](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""description"": ""To enable the Atlassian Confluence connector for Microsoft Sentinel, click to add an organization, fill the form with the Confluence environment credentials and click to Connect. 
\n Follow [these steps](https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/) to create an API token.\n "", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Atlassian Confluence organization URL"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add organization"", ""title"": ""Add organization"", ""subtitle"": ""Add Atlassian Confluence organization"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Atlassian Confluence organization URL"", ""placeholder"": "".atlassian.net"", ""type"": ""string"", ""name"": ""confluenceorganizationurl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""User Name"", ""placeholder"": ""User Name (e.g., user@example.com)"", ""type"": ""securestring"", ""name"": ""userid""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Atlassian Confluence API access"", ""description"": ""Permission of [Administer Confluence](https://developer.atlassian.com/cloud/confluence/rest/v1/intro/#auth) is required to get access to the Confluence Audit logs API. 
See [Confluence API documentation](https://developer.atlassian.com/cloud/confluence/rest/v1/api-group-audit/#api-wiki-rest-api-audit-get) to learn more about the audit API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit/Data%20Connectors/AtlassianConfluenceAuditLogs_CCP/AtlassianConfluenceAudit_DataConnectorDefinition.json","true" +"Jira_Audit_CL","AtlassianJiraAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit","azuresentinel","azure-sentinel-solution-atlassianjiraaudit","2022-01-10","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JiraAuditAPI","Atlassian","Atlassian Jira Audit","The [Atlassian Jira](https://www.atlassian.com/software/jira) Audit data connector provides the capability to ingest [Jira Audit Records](https://support.atlassian.com/jira-cloud-administration/docs/audit-activities-in-jira-applications/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Jira REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-jiraauditapi-parser) to create the Kusto functions alias, **JiraAudit**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Jira API**\n\n [Follow the instructions](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Jira Audit data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentineljiraauditazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentineljiraauditazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. 
Enter the **JiraAccessToken**, **JiraUsername**, **JiraHomeSiteName** (short site name part, as example HOMESITENAME from https://community.atlassian.com) and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Jira Audit data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-jiraauditapi-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. JiraAuditXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tJiraUsername\n\t\tJiraAccessToken\n\t\tJiraHomeSiteName\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**JiraAccessToken**, **JiraUsername** is required for REST API. [See the documentation to learn more about API](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/). Check all [requirements and follow the instructions](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit/Data%20Connectors/JiraAudit_API_FunctionApp.json","true" +"Jira_Audit_v2_CL","AtlassianJiraAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit","azuresentinel","azure-sentinel-solution-atlassianjiraaudit","2022-01-10","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JiraAuditCCPDefinition","Microsoft","Atlassian Jira Audit (using REST API)","The [Atlassian Jira](https://www.atlassian.com/software/jira) Audit data connector provides the capability to ingest [Jira Audit Records](https://support.atlassian.com/jira-cloud-administration/docs/audit-activities-in-jira-applications/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/) for more information. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""description"": ""To enable the Atlassian Jira connector for Microsoft Sentinel, click to add an organization, fill the form with the Jira environment credentials and click to Connect. \n Follow [these steps](https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/) to create an API token.\n "", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Atlassian Jira organization URL"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add organization"", ""title"": ""Add organization"", ""subtitle"": ""Add Atlassian Jira organization"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Atlassian Jira organization URL"", ""placeholder"": ""Atlassian Jira organization URL"", ""type"": ""string"", ""name"": ""jiraorganizationurl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""User Name"", ""placeholder"": ""User Name (e.g., user@example.com)"", ""type"": ""securestring"", ""name"": ""userid""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Atlassian Jira API access"", ""description"": ""Permission of [Administer 
Jira](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) is required to get access to the Jira Audit logs API. See [Jira API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/#api-group-audit-records) to learn more about the audit API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit/Data%20Connectors/JiraAuditAPISentinelConnector_ccpv2/JiraAudit_DataConnectorDefinition.json","true" +"","Attacker Tools Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Attacker%20Tools%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-attackertools","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Australian Cyber Security Centre","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Australian%20Cyber%20Security%20Centre","azuresentinel","azure-sentinel-solution-australiancybersecurity","2022-11-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Auth0AM_CL","Auth0","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0","azuresentinel","azure-sentinel-solution-auth0","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Auth0","Auth0","Auth0 Access Management","The [Auth0 Access Management](https://auth0.com/access-management) data connector provides the capability to ingest [Auth0 log events](https://auth0.com/docs/api/management/v2/#!/Logs/get_logs) into Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Auth0 Management APIs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Auth0 Management API**\n\n Follow the instructions to obtain the credentials.\n\n1. In Auth0 Dashboard, go to **Applications > Applications**.\n2. Select your Application. This should be a \""Machine-to-Machine\"" Application configured with at least **read:logs** and **read:logs_users** permissions.\n3. Copy **Domain, ClientID, Client Secret**""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Auth0 Access Management data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Auth0 Access Management data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Auth0AccessManagement-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. 
\n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the ****Domain, ClientID, Client Secret****, **AzureSentinelWorkspaceId**, **AzureSentinelSharedKey**. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Auth0 Access Management data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-Auth0AccessManagement-azuredeploy) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. 
The name you type is validated to make sure that it's unique in Azure Functions. (e.g. Auth0AMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tDOMAIN\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**API token** is required. [See the documentation to learn more about API token](https://auth0.com/docs/secure/tokens/access-tokens/get-management-api-access-tokens-for-production)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0/Data%20Connectors/Auth0_FunctionApp.json","true" +"Auth0Logs_CL","Auth0","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0","azuresentinel","azure-sentinel-solution-auth0","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Auth0ConnectorCCPDefinition","Microsoft","Auth0 Logs","The [Auth0](https://auth0.com/docs/api/management/v2/logs/get-logs) data connector allows ingesting logs from Auth0 API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses Auth0 API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### STEP 1 - Configuration steps for the Auth0 Management API""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Follow the instructions to obtain the credentials. \n 1. In Auth0 Dashboard, go to [**Applications > Applications**]\n 2. Select your Application. This should be a [**Machine-to-Machine**] Application configured with at least [**read:logs**] and [**read:logs_users**] permissions. \n 3. Copy [**Domain, ClientID, Client Secret**]""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://example.auth0.com"", ""type"": ""text"", ""name"": ""Domain""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Client ID"", ""type"": ""text"", ""name"": ""ClientId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""ClientSecret""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0/Data%20Connectors/Auth0_CCP/DataConnectorDefinition.json","true" 
+"Authomize_v2_CL","Authomize","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Authomize","authomize","azure-sentinel-solution-authomize","2023-06-15","","","Authomize","Partner","https://support.authomize.com","","domains,verticals","Authomize","Authomize","Authomize Data Connector","The Authomize Data Connector provides the capability to ingest custom log types from Authomize into Microsoft Sentinel.","[{""title"": ""1. Locate your Authomize API key"", ""description"": ""Follow the setup instructions [located under Data Connectors for Authomize](https://github.com/authomize/Open-ITDR/blob/main/Open-Connectors/Platform/Azure-Sentinel/Data%20Connectors/readme.md).""}, {""title"": ""2. Deploy the Authomize data connector using the setup instructions."", ""description"": ""Follow the Instructions on [deploying the data connector to ingest data from Authomize](https://github.com/authomize/Open-ITDR/blob/main/Open-Connectors/Platform/Azure-Sentinel/Data%20Connectors/readme.md).""}, {""title"": ""3. Finalize your setup"", ""description"": ""Validate that your script is running. Simple instructions are located under the [Authomize Data Connector area](https://github.com/authomize/Open-ITDR/blob/main/Open-Connectors/Platform/Azure-Sentinel/Data%20Connectors/readme.md).""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Include custom pre-requisites if the connectivity requires - else delete customs"", ""description"": ""Description for any custom pre-requisite""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Authomize/Data%20Connectors/AuthomizeCustomConnector.json","true" +"AzureActivity","Azure Activity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Activity","azuresentinel","azure-sentinel-solution-azureactivity","2022-04-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActivity","Microsoft","Azure Activity","Azure Activity Log is a subscription log that provides insight into subscription-level events that occur in Azure, including events from Azure Resource Manager operational data, service health events, write operations taken on the resources in your subscription, and the status of activities performed in Azure. For more information, see the [Microsoft Sentinel documentation ](https://go.microsoft.com/fwlink/p/?linkid=2219695&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""instructions"": [{""parameters"": {""text"": ""This connector has been updated to use the diagnostics settings back-end pipeline. which provides increased functionality and better consistency with resource logs.\nConnectors using this pipeline can also be governed at scale by Azure Policy. Learn more about the new Azure Activity connector.\nFollow the instructions below to upgrade your connector to the diagnostics settings pipeline."", ""visible"": true, ""inline"": false}, ""type"": ""InfoMessage""}]}, {""title"": ""1. 
Disconnect your subscriptions from the legacy method"", ""description"": ""The subscriptions listed below are still using the older, legacy method. You are strongly encouraged to upgrade to the new pipeline.
\nTo do this, click on the 'Disconnect All' button below, before proceeding to launch the Azure Policy Assignment wizard."", ""instructions"": [{""parameters"": {""datasourceName"": ""AzureActivityLog""}, ""type"": ""OmsDatasource""}]}, {""title"": ""2. Connect your subscriptions through diagnostic settings new pipeline"", ""description"": ""This connector uses Azure Policy to apply a single Azure Subscription log-streaming configuration to a collection of subscriptions, defined as a scope.\nFollow the instructions below to create and apply a policy to all current and future subscriptions. **Note**, you may already have an active policy for this resource type."", ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps.\u200b"", ""description"": "">1. In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope.\n >2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log and metric types you want to ingest.\n >3. 
To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox."", ""instructions"": [{""parameters"": {""linkType"": ""OpenPolicyAssignment"", ""policyDefinitionGuid"": ""2465583e-4e78-4c15-b6be-a36cbc7c8b0f"", ""assignMode"": 1}, ""type"": ""InstallAgent""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Policy\u200b"", ""description"": ""owner role assigned for each policy assignment scope.\u200b""}, {""name"": ""Subscription"", ""description"": ""owner role permission on the relevant subscription""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Activity/Data%20Connectors/AzureActivity.json","true" +"","Azure Batch Account","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Batch%20Account","azuresentinel","azure-sentinel-solution-batchaccount","2022-06-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"fluentbit_CL","Azure Cloud NGFW by Palo Alto Networks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cloud%20NGFW%20by%20Palo%20Alto%20Networks","paloaltonetworks","cloudngfw-sentinel-solution","2023-11-03","2023-11-03","","Palo Alto Networks","Partner","https://support.paloaltonetworks.com","","domains","AzureCloudNGFWByPaloAltoNetworks","Palo Alto Networks","Azure CloudNGFW By Palo Alto Networks","Cloud Next-Generation Firewall by Palo Alto Networks - an Azure Native ISV Service - is Palo Alto Networks Next-Generation Firewall (NGFW) delivered as a cloud-native service on Azure. 
You can discover Cloud NGFW in the Azure Marketplace and consume it in your Azure Virtual Networks (VNet). With Cloud NGFW, you can access the core NGFW capabilities such as App-ID, URL filtering based technologies. It provides threat prevention and detection through cloud-delivered security services and threat prevention signatures. The connector allows you to easily connect your Cloud NGFW logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. For more information, see the [Cloud NGFW for Azure documentation](https://docs.paloaltonetworks.com/cloud-ngfw/azure).","[{""title"": ""Connect Cloud NGFW by Palo Alto Networks to Microsoft Sentinel"", ""description"": ""Enable Log Settings on All Cloud NGFWs by Palo Alto Networks."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCloudNGFW""}, ""type"": ""ConfigureLogSettings""}]}, {""title"": """", ""description"": ""Inside your Cloud NGFW resource:\n\n1. Navigate to the **Log Settings** from the homepage.\n2. Ensure the **Enable Log Settings** checkbox is checked.\n3. From the **Log Settings** drop-down, choose the desired Log Analytics Workspace.\n4. Confirm your selections and configurations.\n5. Click **Save** to apply the settings.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cloud%20NGFW%20by%20Palo%20Alto%20Networks/Data%20Connectors/CloudNgfwByPAN.json","true" +"","Azure Cognitive Search","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cognitive%20Search","azuresentinel","azure-sentinel-solution-azurecognitivesearch","2022-06-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure DDoS Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20DDoS%20Protection","azuresentinel","azure-sentinel-solution-azureddosprotection","2022-05-13","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure Data Lake Storage Gen1","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Data%20Lake%20Storage%20Gen1","azuresentinel","azure-sentinel-solution-datalakestoragegen1","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Azure Event Hubs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Event%20Hubs","azuresentinel","azure-sentinel-solution-eventhub","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Azure Key 
Vault","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Key%20Vault","azuresentinel","azure-sentinel-solution-azurekeyvault","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure Logic Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Logic%20Apps","azuresentinel","azure-sentinel-solution-logicapps","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure Network Security Groups","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Network%20Security%20Groups","azuresentinel","azure-sentinel-solution-networksecuritygroup","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure SQL Database solution for sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20SQL%20Database%20solution%20for%20sentinel","sentinel4sql","sentinel4sql","2022-08-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Azure Service Bus","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Service%20Bus","azuresentinel","azure-sentinel-solution-servicebus","2022-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure Storage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Storage","azuresentinel","azure-sentinel-solution-azurestorageaccount","2022-05-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure Stream 
Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Stream%20Analytics","azuresentinel","azure-sentinel-solution-streamanalytics","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure Web Application Firewall (WAF)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Web%20Application%20Firewall%20%28WAF%29","azuresentinel","azure-sentinel-solution-azurewebapplicationfirewal","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Azure kubernetes Service","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20kubernetes%20Service","azuresentinel","azure-sentinel-solution-azurekubernetes","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"ADOAuditLogs_CL","AzureDevOpsAuditing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AzureDevOpsAuditing","azuresentinel","azure-sentinel-solution-azuredevopsauditing","2022-09-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureDevOpsAuditLogs","Microsoft","Azure DevOps Audit Logs (via Codeless Connector Platform)","The Azure DevOps Audit Logs data connector allows you to ingest audit events from Azure DevOps into Microsoft Sentinel. This data connector is built using the Microsoft Sentinel Codeless Connector Platform, ensuring seamless integration. It leverages the Azure DevOps Audit Logs API to fetch detailed audit events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview). These transformations enable parsing of the received audit data into a custom table during ingestion, improving query performance by eliminating the need for additional parsing. 
By using this connector, you can gain enhanced visibility into your Azure DevOps environment and streamline your security operations.","[{""title"": ""Connect to Azure DevOps to start collecting Audit logs in Microsoft Sentinel.\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""\n1. Enter the App you have registered.\n 2. In the 'Overview' section, copy the Application (client) ID.\n 3. Select the 'Endpoints' button, and copy the 'OAuth 2.0 authorization endpoint (v2)' value and the 'OAuth 2.0 token endpoint (v2)' value.\n 4. In the 'Certificates & secrets' section, copy the 'Client Secret value', and store it securely.\n5. Provide the required information below and click 'Connect'.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Token Endpoint"", ""name"": ""tokenEndpoint"", ""placeholder"": ""https://login.microsoftonline.com/{TenantId}/oauth2/v2.0/token"", ""type"": ""text"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Authorization Endpoint"", ""name"": ""authorizationEndpoint"", ""placeholder"": ""https://login.microsoftonline.com/{TenantId}/oauth2/v2.0/authorize"", ""type"": ""text"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Endpoint"", ""name"": ""apiEndpoint"", ""placeholder"": ""https://auditservice.dev.azure.com/{organizationName}/_apis/audit/auditlog?api-version=7.2-preview"", ""type"": ""text"", ""validations"": {""required"": true}}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""App Client ID"", ""clientSecretLabel"": ""App Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": 
true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure DevOps Prerequisite"", ""description"": ""Please ensure the following:
1. Register an Entra App in Microsoft Entra Admin Center under App Registrations.
2. In 'API permissions' - add Permissions to 'Azure DevOps - vso.auditlog'.
3. In 'Certificates & secrets' - generate 'Client secret'.
4. In 'Authentication' - add Redirect URI: 'https://portal.azure.com/TokenAuthorize/ExtensionName/Microsoft_Azure_Security_Insights'.
5. In the Azure DevOps settings - enable audit log and set **View audit log** for the user. [Azure DevOps Auditing](https://learn.microsoft.com/en-us/azure/devops/organizations/audit/azure-devops-auditing?view=azure-devops&tabs=preview-page).
6. Ensure the user assigned to connect the data connector has the View audit logs permission explicitly set to Allow at all times. This permission is essential for successful log ingestion. If the permission is revoked or not granted, data ingestion will fail or be interrupted.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AzureDevOpsAuditing/Data%20Connectors/AzureDevOpsAuditLogs_CCP/AzureDevOpsAuditLogs_DataConnectorDefinition.json","true" +"","AzureSecurityBenchmark","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AzureSecurityBenchmark","azuresentinel","azure-sentinel-solution-azuresecuritybenchmark","2022-06-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"BetterMTDAppLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","[{""title"": """", ""description"": ""1. In **Better MTD Console**, click on **Integration** on the side bar.\n2. Select **Others** tab.\n3. Click the **ADD ACCOUNT** button and Select **Microsoft Sentinel** from the available integrations.\n4. 
Create the Integration:\n - set `ACCOUNT NAME` to a descriptive name that identifies the integration then click **Next**\n - Enter your `WORKSPACE ID` and `PRIMARY KEY` from the fields below, click **Save**\n - Click **Done**\n5. Threat Policy setup (Which Incidents should be reported to `Microsoft Sentinel`):\n - In **Better MTD Console**, click on **Policies** on the side bar\n - Click on the **Edit** button of the Policy that you are using.\n - For each Incident types that you want to be logged go to **Send to Integrations** field and select **Sentinel**\n6. For additional information, please refer to our [Documentation](https://mtd-docs.bmobi.net/integrations/how-to-setup-azure-sentinel-integration#mtd-integration-configuration)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" +"BetterMTDDeviceLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","[{""title"": """", ""description"": ""1. In **Better MTD Console**, click on **Integration** on the side bar.\n2. Select **Others** tab.\n3. Click the **ADD ACCOUNT** button and Select **Microsoft Sentinel** from the available integrations.\n4. Create the Integration:\n - set `ACCOUNT NAME` to a descriptive name that identifies the integration then click **Next**\n - Enter your `WORKSPACE ID` and `PRIMARY KEY` from the fields below, click **Save**\n - Click **Done**\n5. 
Threat Policy setup (Which Incidents should be reported to `Microsoft Sentinel`):\n - In **Better MTD Console**, click on **Policies** on the side bar\n - Click on the **Edit** button of the Policy that you are using.\n - For each Incident types that you want to be logged go to **Send to Integrations** field and select **Sentinel**\n6. For additional information, please refer to our [Documentation](https://mtd-docs.bmobi.net/integrations/how-to-setup-azure-sentinel-integration#mtd-integration-configuration)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" +"BetterMTDIncidentLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","[{""title"": """", ""description"": ""1. In **Better MTD Console**, click on **Integration** on the side bar.\n2. Select **Others** tab.\n3. Click the **ADD ACCOUNT** button and Select **Microsoft Sentinel** from the available integrations.\n4. Create the Integration:\n - set `ACCOUNT NAME` to a descriptive name that identifies the integration then click **Next**\n - Enter your `WORKSPACE ID` and `PRIMARY KEY` from the fields below, click **Save**\n - Click **Done**\n5. 
Threat Policy setup (Which Incidents should be reported to `Microsoft Sentinel`):\n - In **Better MTD Console**, click on **Policies** on the side bar\n - Click on the **Edit** button of the Policy that you are using.\n - For each Incident types that you want to be logged go to **Send to Integrations** field and select **Sentinel**\n6. For additional information, please refer to our [Documentation](https://mtd-docs.bmobi.net/integrations/how-to-setup-azure-sentinel-integration#mtd-integration-configuration)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" +"BetterMTDNetflowLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","[{""title"": """", ""description"": ""1. In **Better MTD Console**, click on **Integration** on the side bar.\n2. Select **Others** tab.\n3. Click the **ADD ACCOUNT** button and Select **Microsoft Sentinel** from the available integrations.\n4. Create the Integration:\n - set `ACCOUNT NAME` to a descriptive name that identifies the integration then click **Next**\n - Enter your `WORKSPACE ID` and `PRIMARY KEY` from the fields below, click **Save**\n - Click **Done**\n5. 
Threat Policy setup (Which Incidents should be reported to `Microsoft Sentinel`):\n - In **Better MTD Console**, click on **Policies** on the side bar\n - Click on the **Edit** button of the Policy that you are using.\n - For each Incident types that you want to be logged go to **Send to Integrations** field and select **Sentinel**\n6. For additional information, please refer to our [Documentation](https://mtd-docs.bmobi.net/integrations/how-to-setup-azure-sentinel-integration#mtd-integration-configuration)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" +"Syslog","Barracuda CloudGen Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20CloudGen%20Firewall","microsoftsentinelcommunity","azure-sentinel-solution-barracudacloudgenfirewall","2021-05-02","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","BarracudaCloudFirewall","Barracuda","[Deprecated] Barracuda CloudGen Firewall","The Barracuda CloudGen Firewall (CGFW) connector allows you to easily connect your Barracuda CGFW logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CGFWFirewallActivity and load the function code or click [here](https://aka.ms/sentinel-barracudacloudfirewall-parser). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**.""}, {""title"": ""Configure and connect the Barracuda CloudGen Firewall"", ""description"": ""[Follow instructions](https://aka.ms/sentinel-barracudacloudfirewall-connector) to configure syslog streaming. 
Use the IP address or hostname for the Linux machine with the Microsoft Sentinel agent installed for the Destination IP address."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Barracuda CloudGen Firewall"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20CloudGen%20Firewall/Data%20Connectors/template_BarracudaCloudFirewall.json","true" +"Barracuda_CL","Barracuda WAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF","barracudanetworks","barracuda_web_application_firewall_mss","2022-05-13","","","Barracuda","Partner","https://www.barracuda.com/support","","domains","Barracuda","Barracuda","[Deprecated] Barracuda Web Application Firewall via Legacy Agent","The Barracuda Web Application Firewall (WAF) connector allows you to easily connect your Barracuda logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.

[For more information >​](https://aka.ms/CEF-Barracuda)","[{""title"": ""Configure and connect Barracuda WAF"", ""description"": ""The Barracuda Web Application Firewall can integrate with and export logs directly to Microsoft Sentinel via Azure OMS Server.\u200b\n\n1. Go to [Barracuda WAF configuration](https://aka.ms/asi-barracuda-connector), and follow the instructions, using the parameters below to set up the connection:.\n\n2. Web Firewall logs facility: Go to the advanced settings (link below) for your workspace and on the **Data > Syslog** tabs, make sure that the facility exists.\u200b\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}, {""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF/Data%20Connectors/template_Barracuda.json","true" +"CommonSecurityLog","Barracuda WAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF","barracudanetworks","barracuda_web_application_firewall_mss","2022-05-13","","","Barracuda","Partner","https://www.barracuda.com/support","","domains","Barracuda","Barracuda","[Deprecated] Barracuda Web Application Firewall via Legacy Agent","The Barracuda Web Application Firewall (WAF) connector allows you to easily connect your Barracuda logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.

[For more information >​](https://aka.ms/CEF-Barracuda)","[{""title"": ""Configure and connect Barracuda WAF"", ""description"": ""The Barracuda Web Application Firewall can integrate with and export logs directly to Microsoft Sentinel via Azure OMS Server.\u200b\n\n1. Go to [Barracuda WAF configuration](https://aka.ms/asi-barracuda-connector), and follow the instructions, using the parameters below to set up the connection:.\n\n2. Web Firewall logs facility: Go to the advanced settings (link below) for your workspace and on the **Data > Syslog** tabs, make sure that the facility exists.\u200b\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}, {""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF/Data%20Connectors/template_Barracuda.json","true" +"barracuda_CL","Barracuda WAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF","barracudanetworks","barracuda_web_application_firewall_mss","2022-05-13","","","Barracuda","Partner","https://www.barracuda.com/support","","domains","Barracuda","Barracuda","[Deprecated] Barracuda Web Application Firewall via Legacy Agent","The Barracuda Web Application Firewall (WAF) connector allows you to easily connect your Barracuda logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.

[For more information >​](https://aka.ms/CEF-Barracuda)","[{""title"": ""Configure and connect Barracuda WAF"", ""description"": ""The Barracuda Web Application Firewall can integrate with and export logs directly to Microsoft Sentinel via Azure OMS Server.\u200b\n\n1. Go to [Barracuda WAF configuration](https://aka.ms/asi-barracuda-connector), and follow the instructions, using the parameters below to set up the connection:.\n\n2. Web Firewall logs facility: Go to the advanced settings (link below) for your workspace and on the **Data > Syslog** tabs, make sure that the facility exists.\u200b\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}, {""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF/Data%20Connectors/template_Barracuda.json","true" +"beSECURE_Audit_CL","Beyond Security beSECURE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE","azuresentinel","azure-sentinel-solution-isvtesting12","2022-05-02","","","Beyond Security","Partner","https://beyondsecurity.freshdesk.com/support/home","","domains","BeyondSecuritybeSECURE","Beyond Security","Beyond Security beSECURE","The [Beyond Security beSECURE](https://beyondsecurity.com/) connector allows you to easily connect your Beyond Security beSECURE scan events, scan results and audit trail with Azure Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure beSECURE"", ""description"": ""Follow the steps below to configure your beSECURE solution to send out scan results, scan status and audit trail to Azure Sentinel."", ""innerSteps"": [{""title"": ""1. Access the Integration menu"", ""description"": ""1.1 Click on the 'More' menu option\n\n1.2 Select Server\n\n1.3 Select Integration\n\n1.4 Enable Azure Sentinel""}, {""title"": ""2. 
Provide Azure Sentinel settings"", ""description"": ""Fill in the Workspace ID and Primary Key values, click 'Modify'"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE/Data%20Connectors/Beyond%20Security%20beSECURE.json","true" +"beSECURE_ScanEvent_CL","Beyond Security beSECURE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE","azuresentinel","azure-sentinel-solution-isvtesting12","2022-05-02","","","Beyond Security","Partner","https://beyondsecurity.freshdesk.com/support/home","","domains","BeyondSecuritybeSECURE","Beyond Security","Beyond Security beSECURE","The [Beyond Security beSECURE](https://beyondsecurity.com/) connector allows you to easily connect your Beyond Security beSECURE scan events, scan results and audit trail with Azure Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure beSECURE"", ""description"": ""Follow the steps below to configure your beSECURE solution to send out scan results, scan status and audit trail to Azure Sentinel."", ""innerSteps"": [{""title"": ""1. Access the Integration menu"", ""description"": ""1.1 Click on the 'More' menu option\n\n1.2 Select Server\n\n1.3 Select Integration\n\n1.4 Enable Azure Sentinel""}, {""title"": ""2. Provide Azure Sentinel settings"", ""description"": ""Fill in the Workspace ID and Primary Key values, click 'Modify'"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE/Data%20Connectors/Beyond%20Security%20beSECURE.json","true" +"beSECURE_ScanResults_CL","Beyond Security beSECURE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE","azuresentinel","azure-sentinel-solution-isvtesting12","2022-05-02","","","Beyond Security","Partner","https://beyondsecurity.freshdesk.com/support/home","","domains","BeyondSecuritybeSECURE","Beyond Security","Beyond Security beSECURE","The [Beyond Security beSECURE](https://beyondsecurity.com/) connector allows you to easily connect your Beyond Security beSECURE scan events, scan results and audit trail with Azure Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure beSECURE"", ""description"": ""Follow the steps below to configure your beSECURE solution to send out scan results, scan status and audit trail to Azure Sentinel."", ""innerSteps"": [{""title"": ""1. Access the Integration menu"", ""description"": ""1.1 Click on the 'More' menu option\n\n1.2 Select Server\n\n1.3 Select Integration\n\n1.4 Enable Azure Sentinel""}, {""title"": ""2. 
Provide Azure Sentinel settings"", ""description"": ""Fill in the Workspace ID and Primary Key values, click 'Modify'"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE/Data%20Connectors/Beyond%20Security%20beSECURE.json","true" +"BigIDDSPMCatalog_CL","BigID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BigID","bigid","azure-sentinel-solution-bigid","2025-10-07","","","BigID","Partner","https://www.bigid.com/support","","domains","BigIDDSPMLogsConnectorDefinition","BigID","BigID DSPM connector","The [BigID DSPM](https://bigid.com/data-security-posture-management/) data connector provides the capability to ingest BigID DSPM cases with affected objects and datasource information into Microsoft Sentinel.","[{""description"": ""Provide your BigID domain name like 'customer.bigid.cloud' and your BigID token. 
Generate a token in the BigID console via Settings -> Access Management -> Users -> Select User and generate a token."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""BigID FQDN"", ""placeholder"": ""BigID FQDN"", ""type"": ""text"", ""name"": ""bigidFqdn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""BigID Token"", ""placeholder"": ""BigID Token"", ""type"": ""password"", ""name"": ""bigidToken"", ""validations"": {""required"": true}}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect to BigID DSPM API to start collecting BigID DSPM cases and affected Objects in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""BigID DSPM API access"", ""description"": ""Access to the BigID DSPM API through a BigID Token is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BigID/Data%20Connectors/BigIDDSPMLogs_ccp/BigIDDSPMLogs_connectorDefinition.json","true" +"BitsightAlerts_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into 
Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. 
**Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. 
This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. 
**Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightBreaches_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. 
For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. 
Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightCompany_details_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightCompany_rating_details_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightDiligence_historical_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightDiligence_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightFindings_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightFindings_summary_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightGraph_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightIndustrial_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightObservation_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitglassLogs_CL","Bitglass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitglass","azuresentinel","azure-sentinel-solution-bitglass","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Bitglass","Bitglass","Bitglass","The [Bitglass](https://www.bitglass.com/) data connector provides the capability to retrieve security event logs of the Bitglass services and more events into Microsoft Sentinel through the REST API. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Bitglass**](https://aka.ms/sentinel-bitglass-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Bitglass Log Retrieval API**\n\n Follow the instructions to obtain the credentials.\n\n1. Please contact Bitglass [support](https://pages.bitglass.com/Contact.html) and obtain the **BitglassToken** and **BitglassServiceURL**.\n2. 
Save credentials for using in the data connector.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Bitglass data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Bitglass data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-bitglass-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **BitglassToken**, **BitglassServiceURL** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Bitglass data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-bitglass-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitglassXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tBitglassToken\n\t\tBitglassServiceURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**BitglassToken** and **BitglassServiceURL** are required for making API calls.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitglass/Data%20Connectors/Bitglass_API_FunctionApp.json","true" +"BitwardenEventLogs_CL","Bitwarden","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden","8bit-solutions-llc","bitwarden-sentinel-integration","2024-05-12","2024-10-02","","Bitwarden Inc","Partner","https://bitwarden.com","","domains","BitwardenEventLogs","Bitwarden Inc","Bitwarden Event Logs","This connector provides insight into activity of your Bitwarden organization such as user's activity (logged in, changed password, 2fa, etc.), cipher activity (created, updated, deleted, shared, etc.), collection activity, organization activity, and more.","[{""description"": ""Your API key can be found in the Bitwarden organization admin console.\nPlease see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.\nSelf-hosted Bitwarden servers may need to reconfigure their installation's URL."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Identity Url"", ""placeholder"": ""https://identity.bitwarden.com"", ""type"": ""text"", ""name"": ""identityEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Api Url"", ""placeholder"": ""https://api.bitwarden.com"", ""type"": ""text"", ""name"": ""apiEndpoint""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect Bitwarden Event Logs to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", 
""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Bitwarden Client Id and Client Secret"", ""description"": ""Your API key can be found in the Bitwarden organization admin console. Please see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden/Data%20Connectors/BitwardenEventLogs/definitions.json","true" +"BitwardenGroups_CL","Bitwarden","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden","8bit-solutions-llc","bitwarden-sentinel-integration","2024-05-12","2024-10-02","","Bitwarden Inc","Partner","https://bitwarden.com","","domains","BitwardenEventLogs","Bitwarden Inc","Bitwarden Event Logs","This connector provides insight into activity of your Bitwarden organization such as user's activity (logged in, changed password, 2fa, etc.), cipher activity (created, updated, deleted, shared, etc.), collection activity, organization activity, and more.","[{""description"": ""Your API key can be found in the Bitwarden organization admin console.\nPlease see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.\nSelf-hosted Bitwarden servers may need to reconfigure their installation's URL."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": 
{""label"": ""Bitwarden Identity Url"", ""placeholder"": ""https://identity.bitwarden.com"", ""type"": ""text"", ""name"": ""identityEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Api Url"", ""placeholder"": ""https://api.bitwarden.com"", ""type"": ""text"", ""name"": ""apiEndpoint""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect Bitwarden Event Logs to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Bitwarden Client Id and Client Secret"", ""description"": ""Your API key can be found in the Bitwarden organization admin console. 
Please see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden/Data%20Connectors/BitwardenEventLogs/definitions.json","true" +"BitwardenMembers_CL","Bitwarden","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden","8bit-solutions-llc","bitwarden-sentinel-integration","2024-05-12","2024-10-02","","Bitwarden Inc","Partner","https://bitwarden.com","","domains","BitwardenEventLogs","Bitwarden Inc","Bitwarden Event Logs","This connector provides insight into activity of your Bitwarden organization such as user's activity (logged in, changed password, 2fa, etc.), cipher activity (created, updated, deleted, shared, etc.), collection activity, organization activity, and more.","[{""description"": ""Your API key can be found in the Bitwarden organization admin console.\nPlease see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.\nSelf-hosted Bitwarden servers may need to reconfigure their installation's URL."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Identity Url"", ""placeholder"": ""https://identity.bitwarden.com"", ""type"": ""text"", ""name"": ""identityEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Api Url"", ""placeholder"": ""https://api.bitwarden.com"", ""type"": ""text"", ""name"": ""apiEndpoint""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect Bitwarden Event Logs to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", 
""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Bitwarden Client Id and Client Secret"", ""description"": ""Your API key can be found in the Bitwarden organization admin console. Please see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden/Data%20Connectors/BitwardenEventLogs/definitions.json","true" +"","Blackberry CylancePROTECT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Blackberry%20CylancePROTECT","azuresentinel","azure-sentinel-solution-blackberrycylanceprotect","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","BlinkOps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BlinkOps","blinkoperations1709924858838","azure-sentinel-blink_automation","2025-05-05","","","Blink Support","Partner","https://support.blinkops.com","","domains","","","","","","","","false" +"BHEAttackPathsData_CL","BloodHound Enterprise","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BloodHound%20Enterprise","azurehoundenterprise","bloodhoundenterprise-azuresentinel","2023-05-04","2021-05-04","","SpecterOps","Partner","https://bloodhoundenterprise.io/","","domains","BloodHoundEnterprise","SpecterOps","Bloodhound Enterprise","The solution is designed to test Bloodhound Enterprise package creation process.","[{""title"": """", ""description"": 
"">**NOTE:** This connector uses Azure Functions to connect to a 'BloodHound Enterprise' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieve BloodHound Enterprise API Key and ID**\n\nTo enable the Azure Function to authenticate successfully and pull logs into Microsoft Sentinel, you must first obtain the API Key and ID from your BloodHound Enterprise instance. See the documentation to learn more about API on the `https://bloodhound.specterops.io/integrations/bloodhound-api/working-with-api`.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 'BloodHound Enterprise' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'BloodHound Enterprise' API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the 'BloodHound Enterprise' connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)]()\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Tenant URL**, **API Key**, **API ID** 'and/or Other required fields'. 
\n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": """", ""description"": ""**Option 2 - Manual Deployment of Azure Functions**\n\n Use the following step-by-step instructions to deploy the 'BloodHound Enterprise' connector manually with Azure Functions.""}, {""title"": ""1. Create a Function App"", ""description"": ""1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, ensure Runtime stack is set to **python 3.11**. \n4. In the **Hosting** tab, ensure **Plan type** is set to **'Consumption (Serverless)'**.\n5.select Storage account\n6. 'Add other required configurations'. \n5. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""2. Import Function App Code(Zip deployment)"", ""description"": ""1. Install Azure CLI\n2. From terminal type **az functionapp deployment source config-zip -g -n --src ** and hit enter. Set the `ResourceGroup` value to: your resource group name. Set the `FunctionApp` value to: your newly created function app name. Set the `Zip File` value to: `digitalshadowsConnector.zip`(path to your zip file). Note:- Download the zip file from the link - [Function App Code](https://github.com/metron-labs/Azure-Sentinel/blob/bloodhound/Solutions/BloodHound/Data%20Connectors/BloodHoundAzureFunction.zip)""}, {""title"": ""3. Configure the Function App"", ""description"": ""1. 
In the Function App screen, click the Function App name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following 'x (number of)' application settings individually, under Name, with their respective string values (case-sensitive) under Value: \n\t\tDigitalShadowsAccountID\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tDigitalShadowsKey\n\t\tDigitalShadowsSecret\n\t\tHistoricalDays\n\t\tDigitalShadowsURL\n\t\tClassificationFilterOperation\n\t\tHighVariabilityClassifications\n\t\tFUNCTION_NAME\n\t\tlogAnalyticsUri (optional)\n(add any other settings required by the Function App)\nSet the `DigitalShadowsURL` value to: `https://api.searchlight.app/v1`\nSet the `HighVariabilityClassifications` value to: `exposed-credential,marked-document`\nSet the `ClassificationFilterOperation` value to: `exclude` for exclude function app or `include` for include function app \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}, {""instructions"": [{""type"": ""InstructionStepsGroup"", ""parameters"": {""enable"": true, ""instructionSteps"": [{""title"": ""**STEP 3 - Register the Application in Microsoft Entra ID"", ""description"": ""1. 
**Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**:\n - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab.\n - Ensure you are logged in with an account that has **Admin level** permissions.\n\n2. **Create a New Application**:\n - In the **Microsoft Entra ID portal**, select **App registrations** mentioned on the left-hand side tab.\n - Click on **+ New registration**.\n - Fill out the following fields:\n - **Name**: Enter a name for the app (e.g., \u201cBloodHound App\u201d).\n - **Supported account types**: Choose **Accounts in this organizational directory only** (Default Directory only - Single tenant).\n - **Redirect URI**: Leave this blank unless required otherwise.\n - Click **Register** to create the application.\n\n3. **Copy Application and Tenant IDs**:\n - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You\u2019ll need these for the integration.\n\n4. **Create a Client Secret**:\n - In the **Certificates & secrets** section, click **+ New client secret**.\n - Add a description (e.g., 'BloodHound Secret') and set an expiration (e.g., 1 year).\n - Click **Add**.\n - **Copy the client secret value immediately**, as it will not be shown again.""}, {""title"": ""**STEP 4 - Assign the \""Monitoring Metrics Publisher\"" Role to the App"", ""description"": ""1. **Open the Resource Group in Azure Portal**:\n - Navigate to the **Resource Group** that contains the **Log Analytics Workspace** and **Data Collection Rules (DCRs)** where you want the app to push data.\n\n2. 
**Assign the Role**:\n - In the **Resource Group** menu, click on **Access control (IAM)** mentioned on the left-hand side tab ..\n - Click on **+ Add** and select **Add role assignment**.\n - In the **Role** dropdown, search for and select the **Monitoring Metrics Publisher** role.\n - Under **Assign access to**, choose **Azure AD user, group, or service principal**.\n - In the **Select** field, search for your registered app by **name** or **client ID**.\n - Click **Save** to assign the role to the application.""}, {""title"": ""**STEP 5 - Deploy the ARM Template"", ""description"": ""1. **Retrieve the Workspace ID**:\n - After assigning the role, you will need the **Workspace ID**.\n - Navigate to the **Log Analytics Workspace** within the **Resource Group**.\n - In the **Overview** section, locate the **Workspace ID** field under **Workspace details**.\n - **Copy the Workspace ID** and keep it handy for the next steps.\n\n2. **Click the Deploy to Azure Button**:\n - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fmetron-labs%2FAzure-Sentinel%2Fbloodhound%2FSolutions%2FBloodHound%2FData%2520Connectors%2FDeployToAzure.json).\n - This will take you directly to the Azure portal to start the deployment.\n\n3. **Review and Customize Parameters**:\n - On the custom deployment page, ensure you\u2019re deploying to the correct **subscription** and **resource group**.\n - Fill in the parameters like **workspace name**, **workspace ID**, and **workspace location**.\n\n4. **Click Review + Create** and then **Create** to deploy the resources.""}, {""title"": ""**STEP 6 - Verify DCE, DCR, and Log Analytics Table Setup"", ""description"": ""1. 
**Check the Data Collection Endpoint (DCE)**:\n - After deploying, go to **Azure Portal > Data Collection Endpoints**.\n - Verify that the **BloodHoundDCE** endpoint has been created successfully.\n - **Copy the DCE Logs Ingestion URI**, as you\u2019ll need this for generating the webhook URL.\n\n2. **Confirm Data Collection Rule (DCR) Setup**:\n - Go to **Azure Portal > Data Collection Rules**.\n - Ensure the **BloodHoundDCR** rule is present.\n - **Copy the Immutable ID** of the DCR from the Overview page, as you\u2019ll need it for the webhook URL.\n\n3. **Validate Log Analytics Table**:\n - Navigate to your **Log Analytics Workspace** (linked to Microsoft Sentinel).\n - Under the **Tables** section, verify that the **BloodHoundTable_CL** table has been created successfully and is ready to receive data.""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**BloodHound Enterprise API key & Id** is required. 
See the documentation to learn more about API on the `https://bloodhound.specterops.io/integrations/bloodhound-api/working-with-api`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BloodHound%20Enterprise/Data%20Connectors/BloodHoundFunction.json","true" +"BoxEvents_CL","Box","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box","azuresentinel","azure-sentinel-solution-box","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BoxDataConnector","Box","Box","The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Microsoft Sentinel using the Box REST API. Refer to [Box documentation](https://developer.box.com/guides/events/enterprise-events/for-enterprise/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Box REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**BoxEvents**](https://aka.ms/sentinel-BoxDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Box events collection**\n\nSee documentation to [setup JWT authentication](https://developer.box.com/guides/authentication/jwt/jwt-setup/) and [obtain JSON file with credentials](https://developer.box.com/guides/authentication/jwt/with-sdk/#prerequisites).""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Box data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Box JSON configuration file, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Box data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BoxDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **AzureSentinelWorkspaceId**, **AzureSentinelSharedKey**, **BoxConfigJSON**\n4. 
Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Box data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-BoxDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAzureSentinelWorkspaceId\n\t\tAzureSentinelSharedKey\n\t\tBOX_CONFIG_JSON\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Box API Credentials"", ""description"": ""Box config JSON file is required for Box REST API JWT authentication. 
[See the documentation to learn more about JWT authentication](https://developer.box.com/guides/authentication/jwt/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box/Data%20Connectors/Box_API_FunctionApp.json","true" +"BoxEventsV2_CL","Box","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box","azuresentinel","azure-sentinel-solution-box","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BoxEventsCCPDefinition","Microsoft","Box Events (CCP)","The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Microsoft Sentinel using the Box REST API. Refer to [Box documentation](https://developer.box.com/guides/events/enterprise-events/for-enterprise/) for more information.","[{""description"": "">**NOTE:** This connector uses Codeless Connecor Platform (CCP) to connect to the Box REST API to pull logs into Microsoft Sentinel.""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**BoxEvents**](https://aka.ms/sentinel-BoxDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""description"": ""**STEP 1 - Create Box Custom Application**\n\nSee documentation to [setup client credentials authentication](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/)\n""}, {""description"": ""**STEP 2 - Grab Client ID and Client Secret values**\n\nYou might need to setup 2FA to fetch the secret.\n""}, {""description"": ""**STEP 3 - Grab Box Enterprise ID from Box Admin Console**\n\nSee documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/)\n""}, {""description"": ""Provide the required values below:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Box Enterprise ID"", ""placeholder"": ""123456"", ""type"": ""text"", 
""name"": ""boxEnterpriseId""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Box to start collecting event logs to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Box API credentials"", ""description"": ""Box API requires a Box App client ID and client secret to authenticate. [See the documentation to learn more about Client Credentials grant](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/)""}, {""name"": ""Box Enterprise ID"", ""description"": ""Box Enterprise ID is required to make the connection. See documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box/Data%20Connectors/BoxEvents_ccp/BoxEvents_DataConnectorDefinition.json","true" +"BoxEvents_CL","Box","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box","azuresentinel","azure-sentinel-solution-box","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BoxEventsCCPDefinition","Microsoft","Box Events (CCP)","The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Microsoft Sentinel using the Box REST API. 
Refer to [Box documentation](https://developer.box.com/guides/events/enterprise-events/for-enterprise/) for more information.","[{""description"": "">**NOTE:** This connector uses Codeless Connecor Platform (CCP) to connect to the Box REST API to pull logs into Microsoft Sentinel.""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**BoxEvents**](https://aka.ms/sentinel-BoxDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""description"": ""**STEP 1 - Create Box Custom Application**\n\nSee documentation to [setup client credentials authentication](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/)\n""}, {""description"": ""**STEP 2 - Grab Client ID and Client Secret values**\n\nYou might need to setup 2FA to fetch the secret.\n""}, {""description"": ""**STEP 3 - Grab Box Enterprise ID from Box Admin Console**\n\nSee documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/)\n""}, {""description"": ""Provide the required values below:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Box Enterprise ID"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""boxEnterpriseId""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Box to start collecting event logs to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Box API credentials"", ""description"": ""Box API requires a Box App 
client ID and client secret to authenticate. [See the documentation to learn more about Client Credentials grant](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/)""}, {""name"": ""Box Enterprise ID"", ""description"": ""Box Enterprise ID is required to make the connection. See documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box/Data%20Connectors/BoxEvents_ccp/BoxEvents_DataConnectorDefinition.json","true" +"CommonSecurityLog","Broadcom SymantecDLP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP","azuresentinel","azure-sentinel-solution-broadcomsymantecdlp","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BroadcomSymantecDLP","Broadcom","[Deprecated] Broadcom Symantec DLP via Legacy Agent","The [Broadcom Symantec Data Loss Prevention (DLP)](https://www.broadcom.com/products/cyber-security/information-protection/data-loss-prevention) connector allows you to easily connect your Symantec DLP with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s information, where it travels, and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SymantecDLP and load the function code or click [here](https://aka.ms/sentinel-symantecdlp-parser). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python \u2013version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Symantec DLP logs to a Syslog agent"", ""description"": ""Configure Symantec DLP to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://knowledge.broadcom.com/external/article/159509/generating-syslog-messages-from-data-los.html) to configure the Symantec DLP to forward syslog\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python \u2013version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP/Data%20Connectors/Connector_Syslog_SymantecDLP.json","true" +"CommonSecurityLog","Broadcom SymantecDLP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP","azuresentinel","azure-sentinel-solution-broadcomsymantecdlp","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BroadcomSymantecDLPAma","Broadcom","[Deprecated] Broadcom Symantec DLP via AMA","The [Broadcom Symantec Data Loss Prevention (DLP)](https://www.broadcom.com/products/cyber-security/information-protection/data-loss-prevention) connector allows you to easily connect your Symantec DLP with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s information, where it travels, and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SymantecDLP and load the function code or click [here](https://aka.ms/sentinel-symantecdlp-parser). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. 
Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Symantec DLP logs to a Syslog agent"", ""description"": ""Configure Symantec DLP to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://knowledge.broadcom.com/external/article/159509/generating-syslog-messages-from-data-los.html) to configure the Symantec DLP to forward syslog\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP/Data%20Connectors/template_SymantecDLPAMA.json","true" +"","Business Email Compromise - Financial Fraud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Business%20Email%20Compromise%20-%20Financial%20Fraud","azuresentinel","azure-sentinel-solution-bec_financialfraud","2023-08-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"Syslog","CTERA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTERA","cteranetworksltd1651947437632","ctera-microsoft-sentinel","2024-07-28","","","CTERA","Partner","https://www.ctera.com/","","domains","CTERA","CTERA Networks Ltd","CTERA Syslog","The CTERA Data Connector for Microsoft Sentinel offers monitoring and threat detection capabilities for your CTERA solution.
It includes a workbook visualizing the sum of all operations per type, deletions, and denied access operations.
It also provides analytic rules which detect ransomware incidents and alert you when a user is blocked due to suspicious ransomware activity.
Additionally, it helps you identify critical patterns such as mass access denied events, mass deletions, and mass permission changes, enabling proactive threat management and response.","[{""title"": ""Step 1: Connect CTERA Platform to Syslog"", ""description"": ""Set up your CTERA portal syslog connection and Edge-Filer Syslog connector"", ""instructions"": [{""parameters"": {""title"": ""CTERA Syslog Configuration"", ""instructionSteps"": [{""title"": ""Portal Syslog connection"", ""description"": ""Connect CTERA Portal to syslog server, see instructions https://kb.ctera.com/v1/docs/en/managing-log-settings?highlight=logg""}, {""title"": ""Edge Filer Audit logs"", ""description"": ""Enable Audit logs on the desired Edge-filers""}, {""title"": ""Edge-Filer Syslog Service"", ""description"": ""Enable Edge-Filer Syslog service, see instructions https://kb.ctera.com/v1/docs/en/setting-up-the-edge-filer-syslog-service-2?highlight=Edge%20Filer%20Syslog""}]}}]}, {""title"": ""Step 2: Install Azure Monitor Agent (AMA) on Syslog Server"", ""description"": ""Install the Azure Monitor Agent (AMA) on your syslog server to enable data collection."", ""instructions"": [{""parameters"": {""title"": ""Install Azure Monitor Agent"", ""instructionSteps"": [{""title"": ""Log in to Azure Portal"", ""description"": ""Use your Azure credentials to log in to the Azure Portal.""}, {""title"": ""Navigate to Azure Arc"", ""description"": ""In the Azure Portal, go to 'Azure Arc' and select your connected syslog server.""}, {""title"": ""Select Extensions"", ""description"": ""In the Azure Arc settings for your syslog server, navigate to the 'Extensions' section.""}, {""title"": ""Add Extension"", ""description"": ""Click on 'Add' and select 'Azure Monitor Agent' from the list of available extensions.""}, {""title"": ""Install AMA"", ""description"": ""Follow the prompts to install the Azure Monitor Agent on your syslog server. 
For detailed instructions, refer to the official documentation: [Install Azure Monitor Agent](https://learn.microsoft.com/en-us/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal)""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTERA/Data%20Connectors/CTERA_Data_Connector.json","true" +"CBSLog_Azure_1_CL","CTM360","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360","ctm360wll1698919697848","ctm360_microsoft_sentinel_solution","2023-10-23","","","Cyber Threat Management 360","Partner","https://www.ctm360.com/","","domains","CBSPollingIDAzureFunctions","CTM360","Cyber Blind Spot Integration","Through the API integration, you have the capability to retrieve all the issues related to your CBS organizations via a RESTful interface.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a 'CyberBlindSpot' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the 'CyberBlindSpot' API**\n\nThe provider should provide or link to detailed steps to configure the 'CyberBlindSpot' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 'CyberBlindSpot' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'CyberBlindSpot' API authorization key(s) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the 'CyberBlindSpot' connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CTM360-CBS-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CTM360-CBS-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API **, 'and/or Other required fields'. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CTM360 CBS data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://raw.githubusercontent.com/CTM360-Integrations/Azure-Sentinel/ctm360-HV-CBS-azurefunctionapp/Solutions/CTM360/Data%20Connectors/CBS/AzureFunctionCTM360_CBS.zip) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CTIXYZ).\n\n\te. 
**Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCTM360AccountID\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tCTM360Key\n\t\tFUNCTION_NAME\n\t\tlogAnalyticsUri - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360/Data%20Connectors/CBS/CTM360_CBS_API_functionApp.json","true" +"HackerViewLog_Azure_1_CL","CTM360","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360","ctm360wll1698919697848","ctm360_microsoft_sentinel_solution","2023-10-23","","","Cyber Threat Management 360","Partner","https://www.ctm360.com/","","domains","HVPollingIDAzureFunctions","CTM360","HackerView Intergration","Through the API integration, you have the capability to retrieve all the issues related to your HackerView organizations via a RESTful interface.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a '' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the 'HackerView' API**\n\nThe provider should provide or link to detailed steps to configure the 'HackerView' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 'HackerView' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'HackerView' API authorization key(s) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the 'HackerView' connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CTM360-HV-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CTM360-HV-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API **, 'and/or Other required fields'. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": """", ""description"": ""**Option 2 - Manual Deployment of Azure Functions**\n\n Use the following step-by-step instructions to deploy the 'HackerView' connector manually with Azure Functions.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CTM360 CBS data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://raw.githubusercontent.com/CTM360-Integrations/Azure-Sentinel/ctm360-HV-CBS-azurefunctionapp/Solutions/CTM360/Data%20Connectors/HackerView/AzureFunctionCTM360_HV.zip) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. 
Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CTIXYZ).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCTM360AccountID\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tCTM360Key\n\t\tFUNCTION_NAME\n\t\tlogAnalyticsUri - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360/Data%20Connectors/HackerView/CTM360_HV_API_FunctionApp.json","true" +"","Check Point","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point","checkpoint","checkpoint-sentinel-solutions","2021-08-13","","","Check Point","Partner","https://www.checkpoint.com/support-services/contact-support/","","domains","","","","","","","","false" +"CloudGuard_SecurityEvents_CL","Check Point CloudGuard CNAPP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20CloudGuard%20CNAPP","checkpoint","checkpoint-sentinel-solutions-cloud-guard","2024-11-12","","","Check Point","Partner","https://www.checkpoint.com/support-services/contact-support/","","domains","CloudGuardCCPDefinition","CheckPoint","Check Point CloudGuard CNAPP Connector for Microsoft Sentinel","The [CloudGuard](https://sc1.checkpoint.com/documents/CloudGuard_Dome9/Documentation/Overview/CloudGuard-CSPM-Introduction.htm?cshid=help_center_documentation) data connector enables the ingestion of security events from the CloudGuard API into Microsoft Sentinel™, using Microsoft Sentinel’s Codeless Connector Platform. The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) which parses incoming security event data into custom columns. 
This pre-parsing process eliminates the need for query-time parsing, resulting in improved performance for data queries.","[{""description"": ""To enable the CloudGuard connector for Microsoft Sentinel, enter the required information below and select Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""api_key"", ""type"": ""text"", ""name"": ""api_key""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key Secret"", ""placeholder"": ""api_secret"", ""type"": ""password"", ""name"": ""api_secret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CloudGuard Endpoint URL"", ""placeholder"": ""e.g. https://api.dome9.com"", ""type"": ""text"", ""name"": ""endpoint_url""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Filter"", ""placeholder"": ""Paste filter from CloudGuard"", ""type"": ""text"", ""name"": ""query_filter""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect CloudGuard Security Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""CloudGuard API Key"", ""description"": ""Refer to the instructions provided [here](https://sc1.checkpoint.com/documents/CloudGuard_Dome9/Documentation/Settings/Users-Roles.htm#add_service) to generate an API key.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20CloudGuard%20CNAPP/Data%20Connectors/CloudGuard_ccp/CloudGuard_DataConnectorDefinition.json","true" +"argsentdc_CL","Check Point Cyberint 
Alerts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20Alerts","checkpoint","checkpoint-cyberint-solutions-alerts","2025-03-18","","","Cyberint","Partner","https://cyberint.com/customer-support/","","domains","CheckPointCyberintAlerts","Checkpoint Cyberint","Check Point Cyberint Alerts Connector (via Codeless Connector Platform)","Cyberint, a Check Point company, provides a Microsoft Sentinel integration to streamline critical Alerts and bring enriched threat intelligence from the Infinity External Risk Management solution into Microsoft Sentinel. This simplifies the process of tracking the status of tickets with automatic sync updates across systems. Using this new integration for Microsoft Sentinel, existing Cyberint and Microsoft Sentinel customers can easily pull logs based on Cyberint's findings into Microsoft Sentinel platform.","[{""title"": ""Connect Checkpoint Cyberint Alerts to Microsoft Sentinel"", ""description"": ""To enable the connector provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Argos URL"", ""placeholder"": ""Argos URL"", ""type"": ""text"", ""name"": ""argosurl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Customer Name"", ""placeholder"": ""Customer Name"", ""type"": ""text"", ""name"": ""customername""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""Connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Check 
Point Cyberint API Key, Argos URL, and Customer Name"", ""description"": ""The connector API key, Argos URL, and Customer Name are required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20Alerts/Data%20Connectors/CyberintArgosAlertsLogs_ccp/CyberintArgosAlertsLogs_connectorDefinition.json","true" +"iocsent_CL","Check Point Cyberint IOC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20IOC","checkpoint","azure-sentinel-checkpoint-cyberint-ioc","2025-04-29","","","Cyberint","Partner","https://cyberint.com/customer-support/","","domains","CheckPointCyberintIOC","Checkpoint Cyberint","Check Point Cyberint IOC Connector","This is data connector for Check Point Cyberint IOC.","[{""title"": ""Connect Checkpoint Cyberint Alerts to Microsoft Sentinel"", ""description"": ""To enable the connector provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Argos URL"", ""placeholder"": ""Argos URL"", ""type"": ""text"", ""name"": ""argosurl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API key"", ""placeholder"": ""API key"", ""type"": ""text"", ""name"": ""apikey""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""Connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Check Point Cyberint API Key and Argos URL"", ""description"": ""The connector API key and Argos URL are 
required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20IOC/Data%20Connectors/CyberintArgosIOCLogs_ccp/CyberintArgosIOCLogs_connectorDefinition.json","true" +"","CheckPhish by Bolster","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CheckPhish%20by%20Bolster","azuresentinel","azure-sentinel-solution-checkphishbybolster","2022-10-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"Syslog","Cisco ACI","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ACI","azuresentinel","azure-sentinel-solution-ciscoaci","2021-07-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoACI","Cisco","[Deprecated] Cisco Application Centric Infrastructure","[Cisco Application Centric Infrastructure (ACI)](https://www.cisco.com/c/en/us/solutions/collateral/data-center-virtualization/application-centric-infrastructure/solution-overview-c22-741487.html) data connector provides the capability to ingest [Cisco ACI logs](https://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/all/syslog/guide/b_ACI_System_Messages_Guide/m-aci-system-messages-reference.html) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoACIEvent**](https://aka.ms/sentinel-CiscoACI-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Cisco ACI Release 1.x"", ""instructions"": []}, {""title"": ""1. 
Configure Cisco ACI system sending logs via Syslog to remote server where you will install the agent."", ""description"": ""[Follow these steps](https://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/1-x/basic-config/b_ACI_Config_Guide/b_ACI_Config_Guide_chapter_010.html#d2933e4611a1635) to configure Syslog Destination, Destination Group, and Syslog Source.""}, {""title"": ""2. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server to which the logs will be forwarded.\n\n> Logs on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, 
""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the Syslog schema.\n\n>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ACI/Data%20Connectors/CiscoACI_Syslog.json","true" +"CiscoETD_CL","Cisco ETD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ETD","cisco","cisco-etd-sentinel","2024-03-04","","","Cisco Systems","Partner","","","domains","CiscoETD","Cisco","Cisco ETD","The connector fetches data from ETD api for threat analysis","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ETD API to pull its logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**Follow the deployment steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the ETD data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following).\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco ETD data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CiscoETD-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the **WorkspaceID**, **SharedKey**, **ClientID**, **ClientSecret**, **ApiKey**, **Verdicts**, **ETD Region**\n4. Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Email Threat Defense API, API key, Client ID and Secret"", ""description"": ""Ensure you have the API key, Client ID and Secret key.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ETD/Data%20Connectors/CiscoETD_API_FunctionApp.json","true" +"CommonSecurityLog","Cisco Firepower EStreamer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer","cisco","cisco-firepower-estreamer","2022-05-25","","","Cisco","Partner","https://www.cisco.com/c/en_in/support/index.html","","domains","CiscoFirepowerEStreamer","Cisco","[Deprecated] Cisco Firepower eStreamer via Legacy Agent","eStreamer is a Client Server API designed for the Cisco Firepower NGFW Solution. The eStreamer client requests detailed event data on behalf of the SIEM or logging solution in the Common Event Format (CEF).","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 25226 TCP.\n\n> 1. 
Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Install the Firepower eNcore client"", ""description"": ""Install and configure the Firepower eNcore eStreamer client, for more details see full install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html)"", ""innerSteps"": [{""title"": ""2.1 Download the Firepower Connector from github"", ""description"": ""Download the latest version of the Firepower eNcore connector for Microsoft Sentinel [here](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector). 
If you plan on using python3 use the [python3 eStreamer connector](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector/tree/python3)""}, {""title"": ""2.2 Create a pkcs12 file using the Azure/VM Ip Address"", ""description"": ""Create a pkcs12 certificate using the public IP of the VM instance in Firepower under System->Integration->eStreamer, for more information please see install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049443)""}, {""title"": ""2.3 Test Connectivity between the Azure/VM Client and the FMC"", ""description"": ""Copy the pkcs12 file from the FMC to the Azure/VM instance and run the test utility (./encore.sh test) to ensure a connection can be established, for more details please see the setup [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049430)""}, {""title"": ""2.4 Configure encore to stream data to the agent"", ""description"": ""Configure encore to stream data via TCP to the Microsoft Agent, this should be enabled by default, however, additional ports and streaming protocols can configured depending on your network security posture, it is also possible to save the data to the file system, for more information please see [Configure Encore](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049433)""}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. 
Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer/Data%20Connectors/CiscoFirepowerEStreamerCollector.json","true" +"CommonSecurityLog","Cisco Firepower EStreamer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer","cisco","cisco-firepower-estreamer","2022-05-25","","","Cisco","Partner","https://www.cisco.com/c/en_in/support/index.html","","domains","CiscoFirepowerEStreamerAma","Cisco","[Deprecated] Cisco Firepower eStreamer via AMA","eStreamer is a Client Server API designed for the Cisco Firepower NGFW Solution. The eStreamer client requests detailed event data on behalf of the SIEM or logging solution in the Common Event Format (CEF).","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. 
Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Install the Firepower eNcore client"", ""description"": ""Install and configure the Firepower eNcore eStreamer client, for more details see full install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html)"", ""innerSteps"": [{""title"": ""1. Download the Firepower Connector from github"", ""description"": ""Download the latest version of the Firepower eNcore connector for Microsoft Sentinel [here](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector). If you plan on using python3 use the [python3 eStreamer connector](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector/tree/python3)""}, {""title"": ""2. Create a pkcs12 file using the Azure/VM Ip Address"", ""description"": ""Create a pkcs12 certificate using the public IP of the VM instance in Firepower under System->Integration->eStreamer, for more information please see install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049443)""}, {""title"": ""3. Test Connectivity between the Azure/VM Client and the FMC"", ""description"": ""Copy the pkcs12 file from the FMC to the Azure/VM instance and run the test utility (./encore.sh test) to ensure a connection can be established, for more details please see the setup [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049430)""}, {""title"": ""4. 
Configure encore to stream data to the agent"", ""description"": ""Configure encore to stream data via TCP to the Microsoft Agent, this should be enabled by default, however, additional ports and streaming protocols can configured depending on your network security posture, it is also possible to save the data to the file system, for more information please see [Configure Encore](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049433)""}]}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer/Data%20Connectors/template_CiscoFirepowerEStreamerAMA.json","true" +"Syslog","Cisco ISE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ISE","azuresentinel","azure-sentinel-solution-ciscoise","2021-07-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoISE","Cisco","[Deprecated] Cisco Identity Services Engine","The Cisco Identity Services Engine (ISE) data connector provides the capability to ingest [Cisco ISE](https://www.cisco.com/c/en/us/products/security/identity-services-engine/index.html) events into Microsoft Sentinel. It helps you gain visibility into what is happening in your network, such as who is connected, which applications are installed and running, and much more. Refer to [Cisco ISE logging mechanism documentation](https://www.cisco.com/c/en/us/td/docs/security/ise/2-7/admin_guide/b_ise_27_admin_guide/b_ISE_admin_27_maintain_monitor.html#reference_BAFBA5FA046A45938810A5DF04C00591) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-ciscoise-parser) to create the Kusto Functions alias, **CiscoISEEvent**"", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure Cisco ISE Remote Syslog Collection Locations"", ""description"": ""[Follow these instructions](https://www.cisco.com/c/en/us/td/docs/security/ise/2-7/admin_guide/b_ise_27_admin_guide/b_ISE_admin_27_maintain_monitor.html#ID58) to configure remote syslog collection locations in your Cisco ISE deployment.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ISE/Data%20Connectors/Connector_Cisco_ISE.json","true" +"ASimAuditEventLogs","Cisco Meraki Events via REST API","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API","azuresentinel","azure-sentinel-solution-ciscomerakinativepoller","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiMultiRule","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki organization events (Security events, Configuration Changes and API Requests) to Microsoft Sentinel. The data connector uses the [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch logs and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received data and ingests into ASIM and custom tables in your Log Analytics workspace. This data connector benefits from capabilities such as DCR based ingestion-time filtering, data normalization.

**Supported ASIM schema:**
1. Network Session
2. Web Session
3. Audit Event","[{""description"": ""Currently, this connector allows to ingest events from the following [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) endpoint: \n 1. [Get Organization Appliance Security Events](https://developer.cisco.com/meraki/api-latest/#!get-organization-appliance-security-events) \n>This connector parses **IDS Alert** events into ASimNetworkSessionLogs Table and **File Scanned** events into ASimWebSessionLogs Table. \n 2. [Get Organization Api Requests](https://developer.cisco.com/meraki/api-latest/#!get-organization-api-requests) \n>This connector parses events into ASimWebSessionLogs Table. \n 3. [Get Organization Configuration Changes](https://developer.cisco.com/meraki/api-latest/#!get-organization-configuration-changes) \n>This connector parses events into ASimAuditEventLogs Table."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organization Id"", ""placeholder"": ""OrganizationId"", ""type"": ""text"", ""name"": ""organization""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""ApiKey"", ""type"": ""password"", ""name"": ""apiKey""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect Cisco Meraki events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Cisco Meraki REST API Key"", ""description"": ""Enable API access in Cisco Meraki and generate API Key. Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information.""}, {""name"": ""Cisco Meraki Organization Id"", ""description"": ""Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API/Data%20Connectors/CiscoMerakiMultiRule_ccp/dataConnectorDefinition.json","true" +"ASimNetworkSessionLogs","Cisco Meraki Events via REST API","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API","azuresentinel","azure-sentinel-solution-ciscomerakinativepoller","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiMultiRule","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki organization events (Security events, Configuration Changes and API Requests) to Microsoft Sentinel. The data connector uses the [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch logs and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received data and ingests into ASIM and custom tables in your Log Analytics workspace. 
This data connector benefits from capabilities such as DCR based ingestion-time filtering, data normalization.

**Supported ASIM schema:**
1. Network Session
2. Web Session
3. Audit Event","[{""description"": ""Currently, this connector allows to ingest events from the following [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) endpoint: \n 1. [Get Organization Appliance Security Events](https://developer.cisco.com/meraki/api-latest/#!get-organization-appliance-security-events) \n>This connector parses **IDS Alert** events into ASimNetworkSessionLogs Table and **File Scanned** events into ASimWebSessionLogs Table. \n 2. [Get Organization Api Requests](https://developer.cisco.com/meraki/api-latest/#!get-organization-api-requests) \n>This connector parses events into ASimWebSessionLogs Table. \n 3. [Get Organization Configuration Changes](https://developer.cisco.com/meraki/api-latest/#!get-organization-configuration-changes) \n>This connector parses events into ASimAuditEventLogs Table."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organization Id"", ""placeholder"": ""OrganizationId"", ""type"": ""text"", ""name"": ""organization""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""ApiKey"", ""type"": ""password"", ""name"": ""apiKey""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect Cisco Meraki events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Cisco Meraki REST API Key"", ""description"": ""Enable API access in Cisco Meraki and generate API Key. Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information.""}, {""name"": ""Cisco Meraki Organization Id"", ""description"": ""Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API/Data%20Connectors/CiscoMerakiMultiRule_ccp/dataConnectorDefinition.json","true" +"ASimWebSessionLogs","Cisco Meraki Events via REST API","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API","azuresentinel","azure-sentinel-solution-ciscomerakinativepoller","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiMultiRule","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki organization events (Security events, Configuration Changes and API Requests) to Microsoft Sentinel. The data connector uses the [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch logs and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received data and ingests into ASIM and custom tables in your Log Analytics workspace. 
This data connector benefits from capabilities such as DCR based ingestion-time filtering, data normalization.

**Supported ASIM schema:**
1. Network Session
2. Web Session
3. Audit Event","[{""description"": ""Currently, this connector allows to ingest events from the following [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) endpoint: \n 1. [Get Organization Appliance Security Events](https://developer.cisco.com/meraki/api-latest/#!get-organization-appliance-security-events) \n>This connector parses **IDS Alert** events into ASimNetworkSessionLogs Table and **File Scanned** events into ASimWebSessionLogs Table. \n 2. [Get Organization Api Requests](https://developer.cisco.com/meraki/api-latest/#!get-organization-api-requests) \n>This connector parses events into ASimWebSessionLogs Table. \n 3. [Get Organization Configuration Changes](https://developer.cisco.com/meraki/api-latest/#!get-organization-configuration-changes) \n>This connector parses events into ASimAuditEventLogs Table."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organization Id"", ""placeholder"": ""OrganizationId"", ""type"": ""text"", ""name"": ""organization""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""ApiKey"", ""type"": ""password"", ""name"": ""apiKey""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect Cisco Meraki events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Cisco Meraki REST API Key"", ""description"": ""Enable API access in Cisco Meraki and generate API Key. Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information.""}, {""name"": ""Cisco Meraki Organization Id"", ""description"": ""Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API/Data%20Connectors/CiscoMerakiMultiRule_ccp/dataConnectorDefinition.json","true" +"CiscoSDWANNetflow_CL","Cisco SD-WAN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN","cisco","cisco-catalyst-sdwan-sentinel","2023-06-01","2024-06-01","","Cisco Systems","Partner","https://globalcontacts.cloudapps.cisco.com/contacts/contactDetails/en_US/c1o1-c2o2-c3o8","","domains","CiscoSDWAN","Cisco","Cisco Software Defined WAN","The Cisco Software Defined WAN(SD-WAN) data connector provides the capability to ingest [Cisco SD-WAN](https://www.cisco.com/c/en_in/solutions/enterprise-networks/sd-wan/index.html) Syslog and Netflow data into Microsoft Sentinel.","[{""description"": ""**To ingest Cisco SD-WAN Syslog and Netflow data into Microsoft Sentinel follow the steps below.**""}, {""title"": ""1. Steps to ingest Syslog data to Microsoft sentinel"", ""description"": ""Azure Monitor Agent will be used to collect the syslog data into Microsoft sentinel. 
For that first need to create an azure arc server for the VM from which syslog data will be sent.\n""}, {""title"": ""1.1 Steps to Add Azure Arc Server"", ""description"": ""1. In Azure portal, go to Servers - Azure Arc and click on Add.\n2. Select Generate Script under Add a single server section. A User can also generate scripts for Multiple Servers as well.\n3. Review the information on the Prerequisites page, then select Next.\n4. On the Resource details page, provide the subscription and resource group of the Microsoft Sentinel, Region, Operating system and Connectivity method. Then select Next.\n5. On the Tags page, review the default Physical location tags suggested and enter a value, or specify one or more Custom tags to support your standards. Then select Next\n6. Select Download to save the script file. \n7. Now that you have generated the script, the next step is to run it on the server that you want to onboard to Azure Arc. \n8. If you have Azure VM follow the steps mentioned in the [link](https://learn.microsoft.com/azure/azure-arc/servers/plan-evaluate-on-azure-virtual-machine) before running the script. \n9. Run the script by the following command: `./.sh`\n10. After you install the agent and configure it to connect to Azure Arc-enabled servers, go to the Azure portal to verify that the server has successfully connected. View your machine in the Azure portal.\n> **Reference link:** [https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm](https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm)""}, {""title"": ""1.2 Steps to Create Data Collection Rule (DCR)"", ""description"": ""1. In Azure Portal search for Monitor. Under Settings, select Data Collection Rules and Select Create.\n2. On the Basics panel, enter the Rule Name, Subscription, Resource group, Region and Platform Type.\n3. Select Next: Resources.\n4. 
Select Add resources.Use the filters to find the virtual machine that you'll use to collect logs.\n5. Select the virtual machine. Select Apply.\n6. Select Next: Collect and deliver.\n7. Select Add data source. For Data source type, select Linux syslog. \n8. For Minimum log level, leave the default values LOG_DEBUG.\n9. Select Next: Destination.\n10. Select Add destination and add Destination type, Subscription and Account or namespace.\n11. Select Add data source. Select Next: Review + create.\n12. Select Create. Wait for 20 minutes. In Microsoft Sentinel or Azure Monitor, verify that the Azure Monitor agent is running on your VM.\n> **Reference link:** [https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent](https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent)""}, {""title"": ""2. Steps to ingest Netflow data to Microsoft sentinel"", ""description"": ""To Ingest Netflow data into Microsoft sentinel, Filebeat and Logstash needs to be installed and configured on the VM. After the configuration, vm will be able to receive netflow data on the configured port and that data will be ingested into the workspace of Microsoft sentinel.\n""}, {""title"": ""2.1 Install filebeat and logstash"", ""description"": ""1. For the installation of filebeat and logstash using apt refer to this doc: \n 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html). \n 2. Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html). \n2. For the installation of filebeat and logstash for RedHat based Linux (yum) steps are as follows: \n 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum). \n 2. 
Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum)""}, {""title"": ""2.2 Configure Filebeat to send events to Logstash"", ""description"": ""1. Edit filebeat.yml file: `vi /etc/filebeat/filebeat.yml` \n2. Comment out the Elasticsearch Output section. \n3. Uncomment Logstash Output section (Uncomment out only these two lines)-\n\t\toutput.logstash\n\t\thosts: [\""localhost:5044\""] \n3. In the Logstash Output section, if you want to send the data other than the default port i.e. 5044 port, then replace the port number in the hosts field. (Note: This port should be added in the conf file, while configuring logstash.) \n4. In the 'filebeat.inputs' section comment out existing configuration and add the following configuration: \n\t\t- type: netflow\n\t\t max_message_size: 10KiB\n\t\t host: \""0.0.0.0:2055\""\n\t\t protocols: [ v5, v9, ipfix ]\n\t\t expiration_timeout: 30m\n\t\t queue_size: 8192\n\t\t custom_definitions:\n\t\t - /etc/filebeat/custom.yml\n\t\t detect_sequence_reset: true\n\t\t enabled: true \n6. In the Filebeat inputs section, if you want to receive the data other than the default port i.e. 2055 port, then replace the port number in the host field. \n7. Add the provided [custom.yml](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/custom.yml) file inside the /etc/filebeat/ directory. \n8. Open the filebeat input and output port in the firewall. \n 1. Run command: `firewall-cmd --zone=public --permanent --add-port=2055/udp` \n 2. Run command: `firewall-cmd --zone=public --permanent --add-port=5044/udp` \n> Note: if a custom port is added for filebeat input/output, then open that port in the firewall.""}, {""title"": ""2.3 Configure Logstash to send events to Microsoft Sentinel"", ""description"": ""1. Install the Azure Log Analytics plugin: \n 1. 
Run Command: `sudo /usr/share/logstash/bin/logstash-plugin install microsoft-logstash-output-azure-loganalytics` \n3. Store the Log Analytics workspace key in the Logstash key store. The workspace key can be found in Azure Portal under Log analytic workspace > Select workspace > Under Settings select Agent > Log Analytics agent instructions. \n4. Copy the Primary key and run the following commands: \n 1. `sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash create LogAnalyticsKey` \n 2. `sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash add LogAnalyticsKey` \n5. Create the configuration file /etc/logstash/cisco-netflow-to-sentinel.conf: \n\t\tinput {\n\t\t beats {\n\t\t port => #(Enter output port number which has been configured during filebeat configuration i.e. filebeat.yml file .)\n\t\t }\n\t\t}\n\t\toutput {\n\t\t microsoft-logstash-output-azure-loganalytics {\n\t\t workspace_id => \""\""\n\t\t workspace_key => \""${LogAnalyticsKey}\""\n\t\t custom_log_table_name => \""CiscoSDWANNetflow\""\n\t\t }\n\t\t} \n> Note: If table is not present in Microsoft sentinel, then it will create a new table in sentinel.""}, {""title"": ""2.4 Run Filebeat:"", ""description"": ""1. Open a terminal and run the command: \n> `systemctl start filebeat` \n2. This command will start running filebeat in the background. To see the logs stop the filebeat (`systemctl stop filebeat`) then run the following command: \n> `filebeat run -e`""}, {""title"": ""2.5 Run Logstash:"", ""description"": ""1. In another terminal run the command: \n> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf &` \n2. This command will start running the logstash in the background. 
To see the logs of logstash kill the above process and run the following command : \n> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf`""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/CiscoSDWAN.json","true" +"Syslog","Cisco SD-WAN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN","cisco","cisco-catalyst-sdwan-sentinel","2023-06-01","2024-06-01","","Cisco Systems","Partner","https://globalcontacts.cloudapps.cisco.com/contacts/contactDetails/en_US/c1o1-c2o2-c3o8","","domains","CiscoSDWAN","Cisco","Cisco Software Defined WAN","The Cisco Software Defined WAN(SD-WAN) data connector provides the capability to ingest [Cisco SD-WAN](https://www.cisco.com/c/en_in/solutions/enterprise-networks/sd-wan/index.html) Syslog and Netflow data into Microsoft Sentinel.","[{""description"": ""**To ingest Cisco SD-WAN Syslog and Netflow data into Microsoft Sentinel follow the steps below.**""}, {""title"": ""1. Steps to ingest Syslog data to Microsoft sentinel"", ""description"": ""Azure Monitor Agent will be used to collect the syslog data into Microsoft sentinel. For that first need to create an azure arc server for the VM from which syslog data will be sent.\n""}, {""title"": ""1.1 Steps to Add Azure Arc Server"", ""description"": ""1. In Azure portal, go to Servers - Azure Arc and click on Add.\n2. Select Generate Script under Add a single server section. A User can also generate scripts for Multiple Servers as well.\n3. Review the information on the Prerequisites page, then select Next.\n4. 
On the Resource details page, provide the subscription and resource group of the Microsoft Sentinel, Region, Operating system and Connectivity method. Then select Next.\n5. On the Tags page, review the default Physical location tags suggested and enter a value, or specify one or more Custom tags to support your standards. Then select Next\n6. Select Download to save the script file. \n7. Now that you have generated the script, the next step is to run it on the server that you want to onboard to Azure Arc. \n8. If you have Azure VM follow the steps mentioned in the [link](https://learn.microsoft.com/azure/azure-arc/servers/plan-evaluate-on-azure-virtual-machine) before running the script. \n9. Run the script by the following command: `./.sh`\n10. After you install the agent and configure it to connect to Azure Arc-enabled servers, go to the Azure portal to verify that the server has successfully connected. View your machine in the Azure portal.\n> **Reference link:** [https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm](https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm)""}, {""title"": ""1.2 Steps to Create Data Collection Rule (DCR)"", ""description"": ""1. In Azure Portal search for Monitor. Under Settings, select Data Collection Rules and Select Create.\n2. On the Basics panel, enter the Rule Name, Subscription, Resource group, Region and Platform Type.\n3. Select Next: Resources.\n4. Select Add resources.Use the filters to find the virtual machine that you'll use to collect logs.\n5. Select the virtual machine. Select Apply.\n6. Select Next: Collect and deliver.\n7. Select Add data source. For Data source type, select Linux syslog. \n8. For Minimum log level, leave the default values LOG_DEBUG.\n9. Select Next: Destination.\n10. Select Add destination and add Destination type, Subscription and Account or namespace.\n11. Select Add data source. Select Next: Review + create.\n12. Select Create. 
Wait for 20 minutes. In Microsoft Sentinel or Azure Monitor, verify that the Azure Monitor agent is running on your VM.\n> **Reference link:** [https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent](https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent)""}, {""title"": ""2. Steps to ingest Netflow data to Microsoft sentinel"", ""description"": ""To Ingest Netflow data into Microsoft sentinel, Filebeat and Logstash needs to be installed and configured on the VM. After the configuration, vm will be able to receive netflow data on the configured port and that data will be ingested into the workspace of Microsoft sentinel.\n""}, {""title"": ""2.1 Install filebeat and logstash"", ""description"": ""1. For the installation of filebeat and logstash using apt refer to this doc: \n 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html). \n 2. Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html). \n2. For the installation of filebeat and logstash for RedHat based Linux (yum) steps are as follows: \n 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum). \n 2. Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum)""}, {""title"": ""2.2 Configure Filebeat to send events to Logstash"", ""description"": ""1. Edit filebeat.yml file: `vi /etc/filebeat/filebeat.yml` \n2. Comment out the Elasticsearch Output section. \n3. Uncomment Logstash Output section (Uncomment out only these two lines)-\n\t\toutput.logstash\n\t\thosts: [\""localhost:5044\""] \n3. 
In the Logstash Output section, if you want to send the data other than the default port i.e. 5044 port, then replace the port number in the hosts field. (Note: This port should be added in the conf file, while configuring logstash.) \n4. In the 'filebeat.inputs' section comment out existing configuration and add the following configuration: \n\t\t- type: netflow\n\t\t max_message_size: 10KiB\n\t\t host: \""0.0.0.0:2055\""\n\t\t protocols: [ v5, v9, ipfix ]\n\t\t expiration_timeout: 30m\n\t\t queue_size: 8192\n\t\t custom_definitions:\n\t\t - /etc/filebeat/custom.yml\n\t\t detect_sequence_reset: true\n\t\t enabled: true \n6. In the Filebeat inputs section, if you want to receive the data other than the default port i.e. 2055 port, then replace the port number in the host field. \n7. Add the provided [custom.yml](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/custom.yml) file inside the /etc/filebeat/ directory. \n8. Open the filebeat input and output port in the firewall. \n 1. Run command: `firewall-cmd --zone=public --permanent --add-port=2055/udp` \n 2. Run command: `firewall-cmd --zone=public --permanent --add-port=5044/udp` \n> Note: if a custom port is added for filebeat input/output, then open that port in the firewall.""}, {""title"": ""2.3 Configure Logstash to send events to Microsoft Sentinel"", ""description"": ""1. Install the Azure Log Analytics plugin: \n 1. Run Command: `sudo /usr/share/logstash/bin/logstash-plugin install microsoft-logstash-output-azure-loganalytics` \n3. Store the Log Analytics workspace key in the Logstash key store. The workspace key can be found in Azure Portal under Log analytic workspace > Select workspace > Under Settings select Agent > Log Analytics agent instructions. \n4. Copy the Primary key and run the following commands: \n 1. `sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash create LogAnalyticsKey` \n 2. 
`sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash add LogAnalyticsKey` \n5. Create the configuration file /etc/logstash/cisco-netflow-to-sentinel.conf: \n\t\tinput {\n\t\t beats {\n\t\t port => #(Enter output port number which has been configured during filebeat configuration i.e. filebeat.yml file .)\n\t\t }\n\t\t}\n\t\toutput {\n\t\t microsoft-logstash-output-azure-loganalytics {\n\t\t workspace_id => \""\""\n\t\t workspace_key => \""${LogAnalyticsKey}\""\n\t\t custom_log_table_name => \""CiscoSDWANNetflow\""\n\t\t }\n\t\t} \n> Note: If table is not present in Microsoft sentinel, then it will create a new table in sentinel.""}, {""title"": ""2.4 Run Filebeat:"", ""description"": ""1. Open a terminal and run the command: \n> `systemctl start filebeat` \n2. This command will start running filebeat in the background. To see the logs stop the filebeat (`systemctl stop filebeat`) then run the following command: \n> `filebeat run -e`""}, {""title"": ""2.5 Run Logstash:"", ""description"": ""1. In another terminal run the command: \n> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf &` \n2. This command will start running the logstash in the background. 
To see the logs of logstash kill the above process and run the following command : \n> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf`""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/CiscoSDWAN.json","true" +"Syslog","Cisco Secure Cloud Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Cloud%20Analytics","azuresentinel","azure-sentinel-solution-ciscostealthwatch","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Stealthwatch","Cisco","[Deprecated] Cisco Secure Cloud Analytics","The [Cisco Secure Cloud Analytics](https://www.cisco.com/c/en/us/products/security/stealthwatch/index.html) data connector provides the capability to ingest [Cisco Secure Cloud Analytics events](https://www.cisco.com/c/dam/en/us/td/docs/security/stealthwatch/management_console/securit_events_alarm_categories/7_4_2_Security_Events_and_Alarm_Categories_DV_2_1.pdf) into Microsoft Sentinel. 
Refer to [Cisco Secure Cloud Analytics documentation](https://www.cisco.com/c/dam/en/us/td/docs/security/stealthwatch/system_installation_configuration/7_5_0_System_Configuration_Guide_DV_1_3.pdf) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**StealthwatchEvent**](https://aka.ms/sentinel-stealthwatch-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Cisco Secure Cloud Analytics version 7.3.2"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the Cisco Secure Cloud Analytics logs are forwarded.\n\n> Logs from Cisco Secure Cloud Analytics Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": 
[{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure Cisco Secure Cloud Analytics event forwarding"", ""description"": ""Follow the configuration steps below to get Cisco Secure Cloud Analytics logs into Microsoft Sentinel.\n1. Log in to the Stealthwatch Management Console (SMC) as an administrator.\n2. In the menu bar, click **Configuration** **>** **Response Management**.\n3. From the **Actions** section in the **Response Management** menu, click **Add > Syslog Message**.\n4. In the Add Syslog Message Action window, configure parameters.\n5. Enter the following custom format:\n|Lancope|Stealthwatch|7.3|{alarm_type_id}|0x7C|src={source_ip}|dst={target_ip}|dstPort={port}|proto={protocol}|msg={alarm_type_description}|fullmessage={details}|start={start_active_time}|end={end_active_time}|cat={alarm_category_name}|alarmID={alarm_id}|sourceHG={source_host_group_names}|targetHG={target_host_group_names}|sourceHostSnapshot={source_url}|targetHostSnapshot={target_url}|flowCollectorName={device_name}|flowCollectorIP={device_ip}|domain={domain_name}|exporterName={exporter_hostname}|exporterIPAddress={exporter_ip}|exporterInfo={exporter_label}|targetUser={target_username}|targetHostname={target_hostname}|sourceUser={source_username}|alarmStatus={alarm_status}|alarmSev={alarm_severity_name}\n\n6. Select the custom format from the list and click **OK**\n7. Click **Response Management > Rules**.\n8. Click **Add** and select **Host Alarm**.\n9. Provide a rule name in the **Name** field.\n10. Create rules by selecting values from the Type and Options menus. To add more rules, click the ellipsis icon. 
For a Host Alarm, combine as many possible types in a statement as possible.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Cloud%20Analytics/Data%20Connectors/Cisco_Stealthwatch_syslog.json","true" +"CiscoSecureEndpoint_CL","Cisco Secure Endpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint","azuresentinel","azure-sentinel-solution-ciscosecureendpoint","2021-10-28","2022-02-02","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSecureEndpoint","Cisco","[DEPRECATED] Cisco Secure Endpoint (AMP)","The Cisco Secure Endpoint (formerly AMP for Endpoints) data connector provides the capability to ingest Cisco Secure Endpoint [audit logs](https://api-docs.amp.cisco.com/api_resources/AuditLog?api_host=api.amp.cisco.com&api_version=v1) and [events](https://api-docs.amp.cisco.com/api_actions/details?api_action=GET+%2Fv1%2Fevents&api_host=api.amp.cisco.com&api_resource=Event&api_version=v1) into Microsoft Sentinel.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cisco Secure Endpoint API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoSecureEndpoint**](https://aka.ms/sentinel-ciscosecureendpoint-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Obtaining Cisco Secure Endpoint API credentials**\n\n1. 
Follow the instructions in the [documentation](https://api-docs.amp.cisco.com/api_resources?api_host=api.amp.cisco.com&api_version=v1) to generate Client ID and API Key.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ciscosecureendpoint-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Cisco Secure Endpoint Api Host**, **Cisco Secure Endpoint Client Id**, **Cisco Secure Endpoint Api Key**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ciscosecureendpoint-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCISCO_SE_API_API_HOST\n\t\tCISCO_SE_API_CLIENT_ID\n\t\tCISCO_SE_API_KEY\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Cisco Secure Endpoint API credentials"", ""description"": ""Cisco Secure Endpoint Client ID and API Key are required. [See the documentation to learn more about Cisco Secure Endpoint API](https://api-docs.amp.cisco.com/api_resources?api_host=api.amp.cisco.com&api_version=v1). 
[API domain](https://api-docs.amp.cisco.com) must be provided as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/CiscoSecureEndpoint_API_FunctionApp.json","true" +"CiscoSecureEndpointAuditLogsV2_CL","Cisco Secure Endpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint","azuresentinel","azure-sentinel-solution-ciscosecureendpoint","2021-10-28","2022-02-02","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSecureEndpointLogsCCPDefinition","Microsoft","Cisco Secure Endpoint (via Codeless Connector Framework)","The Cisco Secure Endpoint (formerly AMP for Endpoints) data connector provides the capability to ingest Cisco Secure Endpoint [audit logs](https://developer.cisco.com/docs/secure-endpoint/auditlog/) and [events](https://developer.cisco.com/docs/secure-endpoint/v1-api-reference-event/) into Microsoft Sentinel.","[{""description"": ""To ingest data from Cisco Secure Endpoint to Microsoft Sentinel, you have to click on Add Account button below, then you get a pop up to fill the details like Email, Organization, Client ID, API Key and Region, provide the required information and click on Connect. 
You can see the connected organizations/emails in the below grid.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Organization"", ""columnValue"": ""properties.addOnAttributes.Organization""}, {""columnName"": ""Email"", ""columnValue"": ""properties.addOnAttributes.Email""}, {""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Email"", ""placeholder"": ""Enter your Cisco Email"", ""type"": ""text"", ""name"": ""email"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Organization"", ""placeholder"": ""Enter the name of your Organization"", ""type"": ""text"", ""name"": ""organization"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Client ID"", ""placeholder"": ""Enter your Client ID"", ""type"": ""text"", ""name"": ""username"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apiKey"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Region"", ""placeholder"": ""Enter the region you want to connect"", ""type"": ""text"", ""name"": ""region"", ""required"": true, ""description"": ""For example, if your region is https://api.apjc.amp.cisco.com then enter only apjc.amp in the above field. 
Follow the link provided in the Cisco Secure Endpoint API Credentials/Regions section for better understanding of the regions.""}}]}]}}], ""title"": ""Connect Cisco Secure Endpoint to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Cisco Secure Endpoint API Credentials/Regions"", ""description"": ""To create API Credentials and to understand the regions, follow the document link provided here. [Click here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/README.md).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/CiscoSecureEndpointLogs_ccp/CiscoSecureEndpointLogs_ConnectorDefinition.json","true" +"CiscoSecureEndpointEventsV2_CL","Cisco Secure Endpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint","azuresentinel","azure-sentinel-solution-ciscosecureendpoint","2021-10-28","2022-02-02","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSecureEndpointLogsCCPDefinition","Microsoft","Cisco Secure Endpoint (via Codeless Connector Framework)","The Cisco Secure Endpoint (formerly AMP for Endpoints) data connector provides the capability to ingest Cisco Secure Endpoint [audit logs](https://developer.cisco.com/docs/secure-endpoint/auditlog/) and [events](https://developer.cisco.com/docs/secure-endpoint/v1-api-reference-event/) into Microsoft Sentinel.","[{""description"": ""To ingest data from Cisco Secure Endpoint to Microsoft Sentinel, you have to click on Add Account button below, then you get a pop up to fill the details like Email, Organization, Client ID, API Key 
and Region, provide the required information and click on Connect. You can see the connected organizations/emails in the below grid.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Organization"", ""columnValue"": ""properties.addOnAttributes.Organization""}, {""columnName"": ""Email"", ""columnValue"": ""properties.addOnAttributes.Email""}, {""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Email"", ""placeholder"": ""Enter your Cisco Email"", ""type"": ""text"", ""name"": ""email"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Organization"", ""placeholder"": ""Enter the name of your Organization"", ""type"": ""text"", ""name"": ""organization"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Client ID"", ""placeholder"": ""Enter your Client ID"", ""type"": ""text"", ""name"": ""username"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apiKey"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Region"", ""placeholder"": ""Enter the region you want to connect"", ""type"": ""text"", ""name"": ""region"", ""required"": true, ""description"": ""For example, if your region is https://api.apjc.amp.cisco.com then enter only apjc.amp in the above field. 
Follow the link provided in the Cisco Secure Endpoint API Credentials/Regions section for better understanding of the regions.""}}]}]}}], ""title"": ""Connect Cisco Secure Endpoint to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Cisco Secure Endpoint API Credentials/Regions"", ""description"": ""To create API Credentials and to understand the regions, follow the document link provided here. [Click here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/README.md).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/CiscoSecureEndpointLogs_ccp/CiscoSecureEndpointLogs_ConnectorDefinition.json","true" +"Syslog","Cisco UCS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20UCS","azuresentinel","azure-sentinel-solution-ciscoucs","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoUCS","Cisco","[Deprecated] Cisco UCS","The [Cisco Unified Computing System (UCS)](https://www.cisco.com/c/en/us/products/servers-unified-computing/index.html) connector allows you to easily connect your Cisco UCS logs with Microsoft Sentinel This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CiscoUCS and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20UCS/Parsers/CiscoUCS.yaml). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure and connect the Cisco UCS"", ""description"": ""[Follow these instructions](https://www.cisco.com/c/en/us/support/docs/servers-unified-computing/ucs-manager/110265-setup-syslog-for-ucs.html#configsremotesyslog) to configure the Cisco UCS to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Cisco UCS"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20UCS/Data%20Connectors/Connector_Syslog_CiscoUCS.json","true" +"","CiscoASA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoASA","azuresentinel","azure-sentinel-solution-ciscoasa","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"CiscoDuo_CL","CiscoDuoSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoDuoSecurity","cisco","duo-security-sentinel","2022-01-07","","","Cisco Systems","Partner","https://duo.com/support","","domains","CiscoDuoSecurity","Cisco","Cisco Duo Security","The Cisco Duo Security data connector provides the capability to ingest [authentication logs](https://duo.com/docs/adminapi#authentication-logs), [administrator logs](https://duo.com/docs/adminapi#administrator-logs), [telephony logs](https://duo.com/docs/adminapi#telephony-logs), [offline enrollment logs](https://duo.com/docs/adminapi#offline-enrollment-logs) and [Trust Monitor events](https://duo.com/docs/adminapi#trust-monitor) into Microsoft Sentinel using the Cisco Duo Admin API. 
Refer to [API documentation](https://duo.com/docs/adminapi) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cisco Duo API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoDuo**](https://aka.ms/sentinel-CiscoDuoSecurity-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Obtaining Cisco Duo Admin API credentials**\n\n1. Follow [the instructions](https://duo.com/docs/adminapi#first-steps) to obtain **integration key**, **secret key**, and **API hostname**. 
Use **Grant read log** permission in the 4th step of [the instructions](https://duo.com/docs/adminapi#first-steps).""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CiscoDuoSecurity-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CiscoDuoSecurity-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Cisco Duo Integration Key**, **Cisco Duo Secret Key**, **Cisco Duo API Hostname**, **Cisco Duo Log Types**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. 
Download the [Azure Function App](https://aka.ms/sentinel-CiscoDuoSecurity-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCISCO_DUO_INTEGRATION_KEY\n\t\tCISCO_DUO_SECRET_KEY\n\t\tCISCO_DUO_API_HOSTNAME\n\t\tCISCO_DUO_LOG_TYPES\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Cisco Duo API credentials"", ""description"": ""Cisco Duo API credentials with permission *Grant read log* is required for Cisco Duo API. See the [documentation](https://duo.com/docs/adminapi#first-steps) to learn more about creating Cisco Duo API credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoDuoSecurity/Data%20Connectors/CiscoDuo_API_FunctionApp.json","true" +"meraki_CL","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMeraki","Cisco","[Deprecated] Cisco Meraki","The [Cisco Meraki](https://meraki.cisco.com/) connector allows you to easily connect your Cisco Meraki (MX/MR/MS) logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CiscoMeraki and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Parsers/CiscoMeraki.txt). 
The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Cisco Meraki device logs into Microsoft Sentinel. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\n For Cisco Meraki logs, we have issues while parsing the data by OMS agent data using default settings. \nSo we advice to capture the logs into custom table **meraki_CL** using below instructions. \n1. Login to the server where you have installed OMS agent.\n2. Download config file [meraki.conf](https://aka.ms/sentinel-ciscomerakioms-conf) \n\t\twget -v https://aka.ms/sentinel-ciscomerakioms-conf -O meraki.conf \n3. Copy meraki.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. \n\t\tcp meraki.conf /etc/opt/microsoft/omsagent/<>/conf/omsagent.d/\n4. 
Edit meraki.conf as follows:\n\n\t a. meraki.conf uses the port **22033** by default. Ensure this port is not being used by any other source on your server\n\n\t b. If you would like to change the default port for **meraki.conf** make sure that you dont use default Azure monitoring /log analytic agent ports I.e.(For example CEF uses TCP port **25226** or **25224**) \n\n\t c. replace **workspace_id** with real value of your Workspace ID (lines 14,15,16,19)\n5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart\n6. Modify /etc/rsyslog.conf file - add below template preferably at the beginning / before directives section \n\t\t$template meraki,\""%timestamp% %hostname% %msg%\\n\"" \n7. Create a custom conf file in /etc/rsyslog.d/ for example 10-meraki.conf and add following filter conditions.\n\n\t With an added statement you will need to create a filter which will specify the logs coming from the Cisco Meraki to be forwarded to the custom table.\n\n\t reference: [Filter Conditions \u2014 rsyslog 8.18.0.master documentation](https://rsyslog.readthedocs.io/en/latest/configuration/filters.html)\n\n\t Here is an example of filtering that can be defined, this is not complete and will require additional testing for each installation.\n\t\t if $rawmsg contains \""flows\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""firewall\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""urls\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""ids-alerts\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""events\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""ip_flow_start\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""ip_flow_end\"" then @@127.0.0.1:22033;meraki\n\t\t & stop \n8. 
Restart rsyslog\n\t\t systemctl restart rsyslog"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Configure and connect the Cisco Meraki device(s)"", ""description"": ""[Follow these instructions](https://documentation.meraki.com/General_Administration/Monitoring_and_Reporting/Meraki_Device_Reporting_-_Syslog%2C_SNMP_and_API) to configure the Cisco Meraki device(s) to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Cisco Meraki"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Data%20Connectors/Connector_Syslog_CiscoMeraki.json","true" +"CiscoMerakiNativePoller_CL","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiNativePoller","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki MX [security events](https://aka.ms/ciscomerakisecurityevents) to Microsoft Sentinel. 
The data connector uses [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.

**Supported ASIM schema:**
1. Network Session","[{""title"": ""Connect Cisco Meraki Security Events to Microsoft Sentinel"", ""description"": ""To enable Cisco Meraki Security Events for Microsoft Sentinel, provide the required information below and click on Connect.\n>This data connector depends on a parser based on a Kusto Function to render the content. [**CiscoMeraki**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/CiscoMeraki/Parsers/CiscoMeraki.txt) Parser currently support only \""**IDS Alert**\"" and \""**File Scanned**\"" Events."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Organization Id"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{organization}}""}], ""transformation"": {""transformationType"": ""predefinedTransformation"", ""outputStream"": ""[concat('Custom-', variables('streamName'))]"", ""dataCollectionRuleTemplateSpecName"": ""[variables('dataCollectionRuleId')]"", ""logAnalyticsTableTemplateSpecName"": ""[variables('logAnalyticsTableId')]""}}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Cisco Meraki REST API Key"", ""description"": ""Enable API access in Cisco Meraki and generate API Key. 
Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information.""}, {""name"": ""Cisco Meraki Organization Id"", ""description"": ""Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Data%20Connectors/CiscoMerakiNativePollerConnector/azuredeploy_Cisco_Meraki_native_poller_connector.json","true" +"meraki_CL","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiNativePoller","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki MX [security events](https://aka.ms/ciscomerakisecurityevents) to Microsoft Sentinel. The data connector uses [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.

**Supported ASIM schema:**
1. Network Session","[{""title"": ""Connect Cisco Meraki Security Events to Microsoft Sentinel"", ""description"": ""To enable Cisco Meraki Security Events for Microsoft Sentinel, provide the required information below and click on Connect.\n>This data connector depends on a parser based on a Kusto Function to render the content. [**CiscoMeraki**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/CiscoMeraki/Parsers/CiscoMeraki.txt) Parser currently support only \""**IDS Alert**\"" and \""**File Scanned**\"" Events."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Organization Id"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{organization}}""}], ""transformation"": {""transformationType"": ""predefinedTransformation"", ""outputStream"": ""[concat('Custom-', variables('streamName'))]"", ""dataCollectionRuleTemplateSpecName"": ""[variables('dataCollectionRuleId')]"", ""logAnalyticsTableTemplateSpecName"": ""[variables('logAnalyticsTableId')]""}}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Cisco Meraki REST API Key"", ""description"": ""Enable API access in Cisco Meraki and generate API Key. 
Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information.""}, {""name"": ""Cisco Meraki Organization Id"", ""description"": ""Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Data%20Connectors/CiscoMerakiNativePollerConnector/azuredeploy_Cisco_Meraki_native_poller_connector.json","true" +"CommonSecurityLog","CiscoSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG","azuresentinel","azure-sentinel-solution-ciscoseg","2021-06-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSEG","Cisco","[Deprecated] Cisco Secure Email Gateway via Legacy Agent","The [Cisco Secure Email Gateway (SEG)](https://www.cisco.com/c/en/us/products/security/email-security/index.html) data connector provides the capability to ingest [Cisco SEG Consolidated Event Logs](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1061902) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoSEGEvent**](https://aka.ms/sentinel-CiscoSEG-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using AsyncOS 14.0 for Cisco Secure Email Gateway"", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Follow these steps to configure Cisco Secure Email Gateway to forward logs via syslog:\n\n2.1. Configure [Log Subscription](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1134718)\n\n>**NOTE:** Select **Consolidated Event Logs** in Log Type field.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG/Data%20Connectors/Connector_Cisco_SEG_CEF.json","true" +"CommonSecurityLog","CiscoSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG","azuresentinel","azure-sentinel-solution-ciscoseg","2021-06-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSEGAma","Cisco","[Deprecated] Cisco Secure Email Gateway via AMA","The [Cisco Secure Email Gateway (SEG)](https://www.cisco.com/c/en/us/products/security/email-security/index.html) data connector provides the capability to ingest [Cisco SEG Consolidated Event Logs](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1061902) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoSEGEvent**](https://aka.ms/sentinel-CiscoSEG-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Follow these steps to configure Cisco Secure Email Gateway to forward logs via syslog:\n\n Configure [Log Subscription](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1134718)\n\n>**NOTE:** Select **Consolidated Event Logs** in Log Type field."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG/Data%20Connectors/template_CiscoSEGAMA.json","true" +"Cisco_Umbrella_audit_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions to set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_cloudfirewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions to set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_dlp_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions to set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_dns_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions to set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_fileevent_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions to set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_firewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_intrusion_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_ip_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_proxy_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_ravpnlogs_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_ztaflow_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_ztna_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_audit_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_cloudfirewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_dlp_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_dns_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_fileevent_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_firewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_intrusion_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_ip_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_proxy_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_ravpnlogs_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_ztaflow_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_ztna_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Syslog","CiscoWSA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoWSA","azuresentinel","azure-sentinel-solution-ciscowsa","2021-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoWSA","Cisco","[Deprecated] Cisco Web Security Appliance","[Cisco Web Security Appliance (WSA)](https://www.cisco.com/c/en/us/products/security/web-security-appliance/index.html) data connector provides the capability to ingest [Cisco WSA Access Logs](https://www.cisco.com/c/en/us/td/docs/security/wsa/wsa_14-0/User-Guide/b_WSA_UserGuide_14_0/b_WSA_UserGuide_11_7_chapter_010101.html) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as 
expected [**CiscoWSAEvent**](https://aka.ms/sentinel-CiscoWSA-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using AsyncOS 14.0 for Cisco Web Security Appliance"", ""instructions"": []}, {""title"": ""1. Configure Cisco Web Security Appliance to forward logs via Syslog to remote server where you will install the agent."", ""description"": ""[Follow these steps](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1134718) to configure Cisco Web Security Appliance to forward logs via Syslog\n\n>**NOTE:** Select **Syslog Push** as a Retrieval Method.""}, {""title"": ""2. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server to which the logs will be forwarded.\n\n> Logs on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click 
**Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the Syslog schema.\n\n>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoWSA/Data%20Connectors/Connector_WSA_Syslog.json","true" +"Syslog","Citrix ADC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20ADC","azuresentinel","azure-sentinel-solution-citrixadc","2022-06-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CitrixADC","Citrix","[Deprecated] Citrix ADC (former NetScaler)","The [Citrix ADC (former NetScaler)](https://www.citrix.com/products/citrix-adc/) data connector provides the capability to ingest Citrix ADC logs into Microsoft Sentinel. If you want to ingest Citrix WAF logs into Microsoft Sentinel, refer this [documentation](https://learn.microsoft.com/azure/sentinel/data-connectors/citrix-waf-web-app-firewall)","[{""title"": """", ""description"": "">**NOTE:** 1. This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CitrixADCEvent and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20ADC/Parsers/CitrixADCEvent.yaml), this function maps Citrix ADC (former NetScaler) events to Advanced Security Information Model [ASIM](https://docs.microsoft.com/azure/sentinel/normalization). The function usually takes 10-15 minutes to activate after solution installation/update. \n\n>**NOTE:** 2. This parser requires a watchlist named **`Sources_by_SourceType`** \n\n> i. 
If you don't have watchlist already created, please click [here](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FASIM%2Fdeploy%2FWatchlists%2FASimSourceType.json) to create. \n\n> ii. Open watchlist **`Sources_by_SourceType`** and add entries for this data source.\n\n> iii. The SourceType value for CitrixADC is **`CitrixADC`**. \n\n> You can refer [this](https://learn.microsoft.com/en-us/azure/sentinel/normalization-manage-parsers?WT.mc_id=Portal-fx#configure-the-sources-relevant-to-a-source-specific-parser) documentation for more details"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure Citrix ADC to forward logs via Syslog"", ""description"": ""3.1 Navigate to **Configuration tab > System > Auditing > Syslog > Servers tab**\n\n 3.2 Specify **Syslog action name**.\n\n 3.3 Set IP address of remote Syslog server and port.\n\n 3.4 Set **Transport type** as **TCP** or **UDP** depending on your remote Syslog server configuration.\n\n 3.5 You can refer Citrix ADC (former NetScaler) [documentation](https://docs.netscaler.com/) for more details.""}, {""title"": ""4. Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the Syslog schema.\n\n>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20ADC/Data%20Connectors/Connector_CitrixADC_syslog.json","true" +"CitrixAnalytics_indicatorEventDetails_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. 
You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","[{""title"": """", ""description"": ""To get access to this capability and the configuration steps on Citrix Analytics, please visit: [Connect Citrix to Microsoft Sentinel.](https://aka.ms/Sentinel-Citrix-Connector)\u200b\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Licensing"", ""description"": ""Entitlements to Citrix Security Analytics in Citrix Cloud. 
Please review [Citrix Tool License Agreement.](https://aka.ms/sentinel-citrixanalyticslicense-readme)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" +"CitrixAnalytics_indicatorSummary_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","[{""title"": """", ""description"": ""To get access to this capability and the configuration steps on Citrix Analytics, please visit: [Connect Citrix to Microsoft Sentinel.](https://aka.ms/Sentinel-Citrix-Connector)\u200b\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Licensing"", ""description"": ""Entitlements to Citrix Security Analytics in Citrix Cloud. Please review [Citrix Tool License Agreement.](https://aka.ms/sentinel-citrixanalyticslicense-readme)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" +"CitrixAnalytics_riskScoreChange_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. 
You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","[{""title"": """", ""description"": ""To get access to this capability and the configuration steps on Citrix Analytics, please visit: [Connect Citrix to Microsoft Sentinel.](https://aka.ms/Sentinel-Citrix-Connector)\u200b\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Licensing"", ""description"": ""Entitlements to Citrix Security Analytics in Citrix Cloud. 
Please review [Citrix Tool License Agreement.](https://aka.ms/sentinel-citrixanalyticslicense-readme)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" +"CitrixAnalytics_userProfile_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","[{""title"": """", ""description"": ""To get access to this capability and the configuration steps on Citrix Analytics, please visit: [Connect Citrix to Microsoft Sentinel.](https://aka.ms/Sentinel-Citrix-Connector)\u200b\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Licensing"", ""description"": ""Entitlements to Citrix Security Analytics in Citrix Cloud. Please review [Citrix Tool License Agreement.](https://aka.ms/sentinel-citrixanalyticslicense-readme)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" +"CommonSecurityLog","Citrix Web App Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall","citrix","citrix_waf_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","CitrixWAF","Citrix Systems Inc.","[Deprecated] Citrix WAF (Web App Firewall) via Legacy Agent"," Citrix WAF (Web App Firewall) is an industry leading enterprise-grade WAF solution. Citrix WAF mitigates threats against your public-facing assets, including websites, apps, and APIs. From layer 3 to layer 7, Citrix WAF includes protections such as IP reputation, bot mitigation, defense against the OWASP Top 10 application threats, built-in signatures to protect against application stack vulnerabilities, and more.

Citrix WAF supports Common Event Format (CEF) which is an industry standard format on top of Syslog messages . By connecting Citrix WAF CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure Citrix WAF to send Syslog messages in CEF format to the proxy machine using the steps below. \n\n1. 
Follow [this guide](https://support.citrix.com/article/CTX234174) to configure WAF.\n\n2. Follow [this guide](https://support.citrix.com/article/CTX136146) to configure CEF logs.\n\n3. Follow [this guide](https://docs.citrix.com/en-us/citrix-adc/13/system/audit-logging/configuring-audit-logging.html) to forward the logs to proxy . Make sure you to send the logs to port 514 TCP on the Linux machine's IP address.\n\n""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall/Data%20Connectors/Citrix_WAF.json","true" +"CommonSecurityLog","Citrix Web App Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall","citrix","citrix_waf_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","CitrixWAFAma","Citrix Systems Inc.","[Deprecated] Citrix WAF (Web App Firewall) via AMA"," Citrix WAF (Web App Firewall) is an industry leading enterprise-grade WAF solution. Citrix WAF mitigates threats against your public-facing assets, including websites, apps, and APIs. From layer 3 to layer 7, Citrix WAF includes protections such as IP reputation, bot mitigation, defense against the OWASP Top 10 application threats, built-in signatures to protect against application stack vulnerabilities, and more.

Citrix WAF supports Common Event Format (CEF) which is an industry standard format on top of Syslog messages . By connecting Citrix WAF CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure Citrix WAF to send Syslog messages in CEF format to the proxy machine using the steps below. \n\n1. Follow [this guide](https://support.citrix.com/article/CTX234174) to configure WAF.\n\n2. Follow [this guide](https://support.citrix.com/article/CTX136146) to configure CEF logs.\n\n3. 
Follow [this guide](https://docs.citrix.com/en-us/citrix-adc/13/system/audit-logging/configuring-audit-logging.html) to forward the logs to proxy . Make sure you to send the logs to port 514 TCP on the Linux machine's IP address.\n\n"", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall/Data%20Connectors/template_Citrix_WAFAMA.json","true" +"CommonSecurityLog","Claroty","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty","azuresentinel","azure-sentinel-solution-claroty","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Claroty","Claroty","[Deprecated] Claroty via Legacy Agent","The [Claroty](https://claroty.com/) data connector provides the capability to ingest [Continuous Threat Detection](https://claroty.com/resources/datasheets/continuous-threat-detection) and [Secure Remote Access](https://claroty.com/industrial-cybersecurity/sra) events into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ClarotyEvent**](https://aka.ms/sentinel-claroty-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure Claroty to send logs using CEF"", ""description"": ""Configure log forwarding using CEF:\n\n1. Navigate to the **Syslog** section of the Configuration menu.\n\n2. Select **+Add**.\n\n3. In the **Add New Syslog Dialog** specify Remote Server **IP**, **Port**, **Protocol** and select **Message Format** - **CEF**.\n\n4. Choose **Save** to exit the **Add Syslog dialog**.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty/Data%20Connectors/Connector_Claroty_CEF.json","true" +"CommonSecurityLog","Claroty","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty","azuresentinel","azure-sentinel-solution-claroty","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ClarotyAma","Claroty","[Deprecated] Claroty via AMA","The [Claroty](https://claroty.com/) data connector provides the capability to ingest [Continuous Threat Detection](https://claroty.com/resources/datasheets/continuous-threat-detection) and [Secure Remote Access](https://claroty.com/industrial-cybersecurity/sra) events into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ClarotyEvent**](https://aka.ms/sentinel-claroty-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Configure Claroty to send logs using CEF"", ""description"": ""Configure log forwarding using CEF:\n\n1. Navigate to the **Syslog** section of the Configuration menu.\n\n2. Select **+Add**.\n\n3. In the **Add New Syslog Dialog** specify Remote Server **IP**, **Port**, **Protocol** and select **Message Format** - **CEF**.\n\n4. Choose **Save** to exit the **Add Syslog dialog**."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty/Data%20Connectors/template_ClarotyAMA.json","true" +"CommonSecurityLog","Claroty xDome","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty%20xDome","claroty1709722359369","microsoft-sentinel-solution-xdome","2024-02-01","","","xDome Customer Support","Partner","https://claroty.com/support-policy","","domains,verticals","ClarotyxDome","Claroty","Claroty xDome","[Claroty](https://claroty.com/) xDome delivers comprehensive security and alert management capabilities for healthcare and industrial network environments. It is designed to map multiple source types, identify the collected data, and integrate it into Microsoft Sentinel data models. This results in the ability to monitor all potential threats in your healthcare and industrial environments in one location, leading to more effective security monitoring and a stronger security posture.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure the Claroty xDome - Microsoft Sentinel integration to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty%20xDome/Data%20Connectors/Claroty_xDome.json","true" +"","Cloud Identity Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloud%20Identity%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-cloudthreatdetection","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Cloud Service Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloud%20Service%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-cloudservicedetection","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Cloudflare_CL","Cloudflare","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare","cloudflare","cloudflare_sentinel","2021-10-20","","","Cloudflare","Partner","https://support.cloudflare.com","","domains","CloudflareDataConnector","Cloudflare","[DEPRECATED] Cloudflare","The Cloudflare data connector provides the capability to ingest [Cloudflare logs](https://developers.cloudflare.com/logs/) into Microsoft Sentinel using the Cloudflare Logpush and Azure Blob Storage. Refer to [Cloudflare documentation](https://developers.cloudflare.com/logs/logpush) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Cloudflare**](https://aka.ms/sentinel-CloudflareDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cloudflare Logpush**\n\nSee documentation to [setup Cloudflare Logpush to Microsoft Azure](https://developers.cloudflare.com/logs/logpush/logpush-dashboard)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cloudflare data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": 
""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cloudflare data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CloudflareDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Azure Blob Storage Container Name**, **Azure Blob Storage Connection String**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cloudflare data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CloudflareDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. 
**Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CloudflareXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCONTAINER_NAME\n\t\tAZURE_STORAGE_CONNECTION_STRING\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Azure Blob Storage connection string and container name"", ""description"": ""Azure Blob Storage connection string and container name where the logs are pushed to by Cloudflare Logpush. 
[See the documentation to learn more about creating Azure Blob Storage container.](https://learn.microsoft.com/azure/storage/blobs/storage-quickstart-blobs-portal)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare/Data%20Connectors/Cloudflare_API_FunctionApp.json","true" +"CloudflareV2_CL","Cloudflare","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare","cloudflare","cloudflare_sentinel","2021-10-20","","","Cloudflare","Partner","https://support.cloudflare.com","","domains","CloudflareDefinition","Microsoft","Cloudflare (Using Blob Container) (via Codeless Connector Framework)"," The Cloudflare data connector provides the capability to ingest Cloudflare logs into Microsoft Sentinel using the Cloudflare Logpush and Azure Blob Storage. Refer to [Cloudflare documentation](https://developers.cloudflare.com/logs/about/)for more information.","[{""title"": ""Connect Cloudflare Logs to Microsoft Sentinel"", ""description"": ""To enable Cloudflare logs for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""parameters"": {""tenantId"": ""[subscription().tenantId]"", ""name"": ""principalId"", ""appId"": ""4f05ce56-95b6-4612-9d98-a45c8cc33f9f""}, ""type"": ""ServicePrincipalIDTextBox_test""}, {""parameters"": {""label"": ""The Blob container's URL you want to collect data from"", ""type"": ""text"", ""name"": ""blobContainerUri"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's storage account resource group name"", ""type"": ""text"", ""name"": ""StorageAccountResourceGroupName"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's storage account location"", ""type"": ""text"", ""name"": ""StorageAccountLocation"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's 
storage account subscription id"", ""type"": ""text"", ""name"": ""StorageAccountSubscription"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The event grid topic name of the blob container's storage account if exist. else keep empty."", ""description"": ""The data flow using event grid to send 'blob-created event' notifications. There could be only one event grid topic for each storage account.\nGo to your blob container's storage account and look in the 'Events' section. If you already have a topic, please provide it's name. Else, keep the text box empty."", ""type"": ""text"", ""name"": ""EGSystemTopicName"", ""validations"": {""required"": false}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""Create a storage account and a container"", ""description"": ""Before setting up logpush in Cloudflare, first create a storage account and a container in Microsoft Azure. Use [this guide](https://learn.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction) to know more about Container and Blob. Follow the steps in the [documentation](https://learn.microsoft.com/en-us/azure/storage/common/storage-account-create?tabs=azure-portal) to create an Azure Storage account.""}, {""name"": ""Generate a Blob SAS URL"", ""description"": ""Create and Write permissions are required. 
Refer the [documentation](https://learn.microsoft.com/en-us/azure/ai-services/translator/document-translation/how-to-guides/create-sas-tokens?tabs=Containers) to know more about Blob SAS token and url.""}, {""name"": ""Collecting logs from Cloudflare to your Blob container"", ""description"": ""Follow the steps in the [documentation](https://developers.cloudflare.com/logs/get-started/enable-destinations/azure/) for collecting logs from Cloudflare to your Blob container.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare/Data%20Connectors/CloudflareLog_CCF/CloudflareLog_ConnectorDefinition.json","true" +"CloudflareV2_CL","Cloudflare CCF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare%20CCF","cloudflare","azure-sentinel-solution-cloudflare-ccf","2025-09-30","","","Cloudflare","Partner","https://support.cloudflare.com","","domains","CloudflareDefinition","Microsoft","Cloudflare (Using Blob Container) (via Codeless Connector Framework)"," The Cloudflare data connector provides the capability to ingest Cloudflare logs into Microsoft Sentinel using the Cloudflare Logpush and Azure Blob Storage. 
Refer to [Cloudflare documentation](https://developers.cloudflare.com/logs/about/)for more information.","[{""title"": ""Connect Cloudflare Logs to Microsoft Sentinel"", ""description"": ""To enable Cloudflare logs for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""parameters"": {""tenantId"": ""[subscription().tenantId]"", ""name"": ""principalId"", ""appId"": ""4f05ce56-95b6-4612-9d98-a45c8cc33f9f""}, ""type"": ""ServicePrincipalIDTextBox_test""}, {""parameters"": {""label"": ""The Blob container's URL you want to collect data from"", ""type"": ""text"", ""name"": ""blobContainerUri"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's storage account resource group name"", ""type"": ""text"", ""name"": ""StorageAccountResourceGroupName"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's storage account location"", ""type"": ""text"", ""name"": ""StorageAccountLocation"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's storage account subscription id"", ""type"": ""text"", ""name"": ""StorageAccountSubscription"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The event grid topic name of the blob container's storage account if exist. else keep empty."", ""description"": ""The data flow using event grid to send 'blob-created event' notifications. There could be only one event grid topic for each storage account.\nGo to your blob container's storage account and look in the 'Events' section. If you already have a topic, please provide it's name. 
Else, keep the text box empty."", ""type"": ""text"", ""name"": ""EGSystemTopicName"", ""validations"": {""required"": false}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""Create a storage account and a container"", ""description"": ""Before setting up logpush in Cloudflare, first create a storage account and a container in Microsoft Azure. Use [this guide](https://learn.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction) to know more about Container and Blob. Follow the steps in the [documentation](https://learn.microsoft.com/en-us/azure/storage/common/storage-account-create?tabs=azure-portal) to create an Azure Storage account.""}, {""name"": ""Generate a Blob SAS URL"", ""description"": ""Create and Write permissions are required. 
Refer the [documentation](https://learn.microsoft.com/en-us/azure/ai-services/translator/document-translation/how-to-guides/create-sas-tokens?tabs=Containers) to know more about Blob SAS token and url.""}, {""name"": ""Collecting logs from Cloudflare to your Blob container"", ""description"": ""Follow the steps in the [documentation](https://developers.cloudflare.com/logs/get-started/enable-destinations/azure/) for collecting logs from Cloudflare to your Blob container.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare%20CCF/Data%20Connectors/CloudflareLog_CCF/CloudflareLog_ConnectorDefinition.json","true" +"Malware_Data_CL","CofenseIntelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence","cofense","cofense-intelligence-sentinel","2023-05-26","2024-05-26","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseIntelligence","Cofense","Cofense Intelligence Threat Indicators Ingestion","The [Cofense-Intelligence](https://cofense.com/product-services/phishing-intelligence/) data connector provides the following capabilities:
1. CofenseToSentinel :
>* Get Threat Indicators from the Cofense Intelligence platform and create Threat Intelligence Indicators in Microsoft Sentinel.
2. SentinelToDefender :
>* Get Malware from Cofense Intelligence and post to custom logs table.
3. CofenseIntelligenceMalware :
>* Get Cofense Intelligence Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
4. DownloadThreatReports :
>* This data connector will fetch the malware data and create the Link from which we can download Threat Reports.
5. RetryFailedIndicators :
>* This data connector will fetch failed indicators from failed indicators file and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of REST APIs refer to the below documentations:
1. Cofense Intelligence API documentation:
> https://www.threathq.com/docs/rest_api_reference.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cofense Intelligence APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Azure Active Directory**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of CofenseIntelligence Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application**\n\n Sometimes called an application password, a client secret is a string value required for the execution of CofenseIntelligence Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseIntelligence Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. 
Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the permissions:\n 1. In the Azure portal, in **App registrations**, select **your application**.\n 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**.\n 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**.\n 4. Select **Grant consent**. \n\n> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide)""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create/get Credentials for the Cofense Intelligence account** \n\n Follow the steps in this section to create/get **Cofense Username** and **Password**:\n 1. Login to https://threathq.com and go to the **Settings menu** on the left navigation bar.\n 2. Choose the API Tokens tab and select **Add a New Token**\n 3. 
Make sure to save the **password**, as it will not be accessible again.""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cofense Intelligence Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cofense connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseIntelligence-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense BaseURL (https:///) \n\t\tCofense Username \n\t\tCofense Password \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tRequireProxy \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tLogLevel (optional) \n\t\tMalware_Data_Table_name\n\t\tSendCofenseIndicatorToDefender \n\t\tSchedule \n4. Click on **Review+Create**. \n5. 
Then after validation click on **Create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cofense Intelligence Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseIntelligence-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense BaseURL (https:///) \n\t\tCofense Username \n\t\tCofense Password \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tFunction App Name \n\t\tAzure Subscription ID \n\t\tRequireProxy \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tLogLevel (optional) \n\t\tMalware_Data_Table_name\n\t\tSendCofenseIndicatorToDefender \n\t\tSchedule \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Cofense Username** and **Password** is required. See the documentation to learn more about API on the [Rest API reference](https://www.threathq.com/docs/rest_api_reference.html)""}, {""name"": ""Microsoft Defender for Endpoints"", ""description"": ""**Microsoft Defender for Endpoints License** is required for SentinelToDefender function.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence/Data%20Connectors/CofenseIntelligenceDataConnector/CofenseIntelligence_API_FunctionApp.json","true" +"ThreatIntelligenceIndicator","CofenseIntelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence","cofense","cofense-intelligence-sentinel","2023-05-26","2024-05-26","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseIntelligence","Cofense","Cofense Intelligence Threat Indicators Ingestion","The [Cofense-Intelligence](https://cofense.com/product-services/phishing-intelligence/) data connector provides the following capabilities:
1. CofenseToSentinel :
>* Get Threat Indicators from the Cofense Intelligence platform and create Threat Intelligence Indicators in Microsoft Sentinel.
2. SentinelToDefender :
>* Get Cofense Intelligence Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
3. CofenseIntelligenceMalware :
>* Get Malware from Cofense Intelligence and post to custom logs table.
4. DownloadThreatReports :
>* This data connector will fetch the malware data and create the Link from which we can download Threat Reports.
5. RetryFailedIndicators :
>* This data connector will fetch failed indicators from failed indicators file and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of the REST APIs, refer to the documentation below:
1. Cofense Intelligence API documentation:
> https://www.threathq.com/docs/rest_api_reference.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cofense Intelligence APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Azure Active Directory**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of CofenseIntelligence Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application**\n\n Sometimes called an application password, a client secret is a string value required for the execution of CofenseIntelligence Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseIntelligence Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. 
Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the permissions:\n 1. In the Azure portal, in **App registrations**, select **your application**.\n 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**.\n 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**.\n 4. Select **Grant consent**. \n\n> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide)""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create/get Credentials for the Cofense Intelligence account** \n\n Follow the steps in this section to create/get **Cofense Username** and **Password**:\n 1. Login to https://threathq.com and go to the **Settings menu** on the left navigation bar.\n 2. Choose the API Tokens tab and select **Add a New Token**\n 3. 
Make sure to save the **password**, as it will not be accessible again.""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cofense Intelligence Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cofense connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseIntelligence-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense BaseURL (https:///) \n\t\tCofense Username \n\t\tCofense Password \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tRequireProxy \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tLogLevel (optional) \n\t\tMalware_Data_Table_name\n\t\tSendCofenseIndicatorToDefender \n\t\tSchedule \n4. Click on **Review+Create**. \n5. 
Then after validation click on **Create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cofense Intelligence Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseIntelligence-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense BaseURL (https:///) \n\t\tCofense Username \n\t\tCofense Password \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tFunction App Name \n\t\tAzure Subscription ID \n\t\tRequireProxy \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tLogLevel (optional) \n\t\tMalware_Data_Table_name\n\t\tSendCofenseIndicatorToDefender \n\t\tSchedule \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Cofense Username** and **Password** is required. See the documentation to learn more about API on the [Rest API reference](https://www.threathq.com/docs/rest_api_reference.html)""}, {""name"": ""Microsoft Defender for Endpoints"", ""description"": ""**Microsoft Defender for Endpoints License** is required for SentinelToDefender function.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence/Data%20Connectors/CofenseIntelligenceDataConnector/CofenseIntelligence_API_FunctionApp.json","true" +"Cofense_Triage_failed_indicators_CL","CofenseTriage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage","cofense","cofense-triage-sentinel","2023-03-24","2023-03-24","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseTriage","Cofense","Cofense Triage Threat Indicators Ingestion","The [Cofense-Triage](https://cofense.com/product-services/cofense-triage/) data connector provides the following capabilities:
1. CofenseBasedIndicatorCreator :
>* Get Threat Indicators from the Cofense Triage platform and create Threat Intelligence Indicators in Microsoft Sentinel.
> * Ingest Cofense Indicator ID and report links into custom logs table.
2. NonCofenseBasedIndicatorCreatorToCofense :
>* Get Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Cofense Triage platform.
3. IndicatorCreatorToDefender :
>* Get Cofense Triage Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
4. RetryFailedIndicators :
>* Get failed indicators from failed indicators files and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of the REST APIs, refer to the documentation below:
1. Cofense API documentation:
> https://``/docs/api/v2/index.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cofense APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and pulls Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Azure Active Directory**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application**\n\n Sometimes called an application password, a client secret is a string value required for the execution of CofenseTriage Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. 
Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the permissions:\n 1. In the Azure portal, in **App registrations**, select **your application**.\n 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**.\n 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**.\n 4. Select **Grant consent**. \n\n> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide)""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create/get Credentials for the Cofense Triage account** \n\n Follow the steps in this section to create/get **Cofense Client ID** and **Client Secret**:\n 1. Go to **Administration > API Management > Version 2 tab > Applications**\n 2. Click on **New Application**\n 3. 
Add the required information and click on **submit**.""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cofense Triage Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cofense connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseTriage-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cofense Triage Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseThreatIndicatorsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Cofense Client ID** and **Client Secret** is required. See the documentation to learn more about API on the `https:///docs/api/v2/index.html`""}, {""name"": ""Microsoft Defender for Endpoints"", ""description"": ""**Microsoft Defender for Endpoints License** is required for IndicatorCreatorToDefender function.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage/Data%20Connectors/CofenseTriageDataConnector/CofenseTriage_API_FunctionApp.json","true" +"Report_links_data_CL","CofenseTriage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage","cofense","cofense-triage-sentinel","2023-03-24","2023-03-24","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseTriage","Cofense","Cofense Triage Threat Indicators Ingestion","The [Cofense-Triage](https://cofense.com/product-services/cofense-triage/) data connector provides the following capabilities:
1. CofenseBasedIndicatorCreator :
>* Get Threat Indicators from the Cofense Triage platform and create Threat Intelligence Indicators in Microsoft Sentinel.
> * Ingest Cofense Indicator ID and report links into custom logs table.
2. NonCofenseBasedIndicatorCreatorToCofense :
>* Get Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Cofense Triage platform.
3. IndicatorCreatorToDefender :
>* Get Cofense Triage Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
4. RetryFailedIndicators :
>* Get failed indicators from failed indicators files and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of REST APIs refer to the below two documentations:
1. Cofense API documentation:
> https://``/docs/api/v2/index.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cofense APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and pulls Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Azure Active Directory**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application**\n\n Sometimes called an application password, a client secret is a string value required for the execution of CofenseTriage Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. 
Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the permissions:\n 1. In the Azure portal, in **App registrations**, select **your application**.\n 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**.\n 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**.\n 4. Select **Grant consent**. \n\n> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide)""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create/get Credentials for the Cofense Triage account** \n\n Follow the steps in this section to create/get **Cofense Client ID** and **Client Secret**:\n 1. Go to **Administration > API Management > Version 2 tab > Applications**\n 2. Click on **New Application**\n 3. 
Add the required information and click on **submit**.""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cofense Triage Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cofense connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseTriage-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cofense Triage Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseThreatIndicatorsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Cofense Client ID** and **Client Secret** is required. See the documentation to learn more about API on the `https:///docs/api/v2/index.html`""}, {""name"": ""Microsoft Defender for Endpoints"", ""description"": ""**Microsoft Defender for Endpoints License** is required for IndicatorCreatorToDefender function.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage/Data%20Connectors/CofenseTriageDataConnector/CofenseTriage_API_FunctionApp.json","true" +"ThreatIntelligenceIndicator","CofenseTriage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage","cofense","cofense-triage-sentinel","2023-03-24","2023-03-24","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseTriage","Cofense","Cofense Triage Threat Indicators Ingestion","The [Cofense-Triage](https://cofense.com/product-services/cofense-triage/) data connector provides the following capabilities:
1. CofenseBasedIndicatorCreator :
>* Get Threat Indicators from the Cofense Triage platform and create Threat Intelligence Indicators in Microsoft Sentinel.
> * Ingest Cofense Indicator ID and report links into custom logs table.
2. NonCofenseBasedIndicatorCreatorToCofense :
>* Get Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Cofense Triage platform.
3. IndicatorCreatorToDefender :
>* Get Cofense Triage Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
4. RetryFailedIndicators :
>* Get failed indicators from failed indicators files and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of REST APIs refer to the below two documentations:
1. Cofense API documentation:
> https://``/docs/api/v2/index.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cofense APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and pulls Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Azure Active Directory**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application**\n\n Sometimes called an application password, a client secret is a string value required for the execution of CofenseTriage Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. 
Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the permissions:\n 1. In the Azure portal, in **App registrations**, select **your application**.\n 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**.\n 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**.\n 4. Select **Grant consent**. \n\n> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide)""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create/get Credentials for the Cofense Triage account** \n\n Follow the steps in this section to create/get **Cofense Client ID** and **Client Secret**:\n 1. Go to **Administration > API Management > Version 2 tab > Applications**\n 2. Click on **New Application**\n 3. 
Add the required information and click on **submit**.""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cofense Triage Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cofense connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseTriage-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cofense Triage Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseThreatIndicatorsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Cofense Client ID** and **Client Secret** is required. See the documentation to learn more about API on the `https:///docs/api/v2/index.html`""}, {""name"": ""Microsoft Defender for Endpoints"", ""description"": ""**Microsoft Defender for Endpoints License** is required for IndicatorCreatorToDefender function.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage/Data%20Connectors/CofenseTriageDataConnector/CofenseTriage_API_FunctionApp.json","true" +"CognniIncidents_CL","Cognni","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cognni","shieldox","cognni_for_microsoft_sentinel","2022-05-06","","","Cognni","Partner","https://cognni.ai/contact-support/","","domains","CognniSentinelDataConnector","Cognni","Cognni","The Cognni connector offers a quick and simple integration with Microsoft Sentinel. You can use Cognni to autonomously map your previously unclassified important information and detect related incidents. 
This allows you to recognize risks to your important information, understand the severity of the incidents, and investigate the details you need to remediate, fast enough to make a difference.","[{""title"": ""Connect to Cognni"", ""description"": ""1. Go to [Cognni integrations page](https://intelligence.cognni.ai/integrations)\n2. Click **'Connect'** on the 'Microsoft Sentinel' box\n3. Copy and paste **'workspaceId'** and **'sharedKey'** (from below) to the related fields on Cognni's integrations screen\n4. Click the **'Connect'** botton to complete the configuration. \n Soon, all your Cognni-detected incidents will be forwarded here (into Microsoft Sentinel)\n\nNot a Cognni user? [Join us](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/shieldox.appsource_freetrial)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Shared Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cognni/Data%20Connectors/CognniSentinelConnector.json","true" +"ThreatIntelligenceIndicator","CognyteLuminar","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CognyteLuminar","cognytetechnologiesisraelltd","microsoft-sentinel-solution-cognyte-luminar","2023-09-15","","","Cognyte Luminar","Partner","https://www.cognyte.com/contact/","","domains","CognyteLuminar","Cognyte Technologies Israel Ltd","Luminar IOCs and Leaked Credentials","Luminar IOCs and Leaked Credentials connector allows integration of intelligence-based IOC data and customer-related leaked records identified by Luminar.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cognyte Luminar API to pull Luminar IOCs and Leaked Credentials into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template for Flex Consumption Plan"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CognyteLuminar-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Application ID**, **Tenant ID**,**Client Secret**, **Luminar API Client ID**, **Luminar API Account ID**, **Luminar API Client Secret**, **Luminar Initial Fetch Date**, **TimeInterval** and deploy.\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template for Premium Plan"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CognyteLuminar-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Application ID**, **Tenant ID**,**Client Secret**, **Luminar API Client ID**, **Luminar API Account ID**, **Luminar API Client Secret**, **Luminar Initial Fetch Date**, **TimeInterval** and deploy.\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cognyte Luminar data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CognyteLuminar-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CognyteLuminarXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\\n\\n1. In the Function App, select the Function App Name and select **Configuration**.\\n2. In the **Application settings** tab, select **+ New application setting**.\\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \\n\\tApplication ID\\n\\tTenant ID\\n\\tClient Secret\\n\\tLuminar API Client ID\\n\\tLuminar API Account ID\\n\\tLuminar API Client Secret\\n\\tLuminar Initial Fetch Date\\n\\tTimeInterval - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Luminar Client ID**, **Luminar Client Secret** and **Luminar Account ID** are required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CognyteLuminar/Data%20Connectors/CognyteLuminar_FunctionApp.json","true" +"Cohesity_CL","CohesitySecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CohesitySecurity","cohesitydev1592001764720","cohesity_sentinel_data_connector","2022-10-10","","","Cohesity","Partner","https://support.cohesity.com/","","domains","CohesityDataConnector","Cohesity","Cohesity","The Cohesity function apps provide the ability to ingest Cohesity Datahawk ransomware alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions that connect to the Azure Blob Storage and KeyVault. This might result in additional costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/), [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) and [Azure KeyVault pricing page](https://azure.microsoft.com/pricing/details/key-vault/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Get a Cohesity DataHawk API key (see troubleshooting [instruction 1](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/CohesitySecurity/Data%20Connectors/Helios2Sentinel/IncidentProducer))**""}, {""title"": """", ""description"": ""**STEP 2 - Register Azure app ([link](https://portal.azure.com/#view/Microsoft_AAD_IAM/ActiveDirectoryMenuBlade/~/RegisteredApps)) and save Application (client) ID, Directory (tenant) ID, and Secret Value ([instructions](https://learn.microsoft.com/en-us/azure/healthcare-apis/register-application)). Grant it Azure Storage (user_impersonation) permission. Also, assign the 'Microsoft Sentinel Contributor' role to the application in the appropriate subscription.**""}, {""title"": """", ""description"": ""**STEP 3 - Deploy the connector and the associated Azure Functions**.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cohesity data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Cohesity-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the parameters that you created at the previous steps\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Azure Blob Storage connection string and container name"", ""description"": ""Azure Blob Storage connection string and container name""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CohesitySecurity/Data%20Connectors/Helios2Sentinel/Cohesity_API_FunctionApp.json","true" +"","Common Event Format","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Common%20Event%20Format","azuresentinel","azure-sentinel-solution-commoneventformat","2022-05-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"CommvaultSecurityIQ_CL","Commvault Security IQ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Commvault%20Security%20IQ","commvault","microsoft-sentinel-solution-commvaultsecurityiq","2023-08-17","","","Commvault","Partner","https://www.commvault.com/support","","domains","CommvaultSecurityIQ_CL","Commvault","CommvaultSecurityIQ","This Azure Function enables Commvault users to ingest alerts/events into their Microsoft Sentinel instance. With Analytic Rules,Microsoft Sentinel can automatically create Microsoft Sentinel incidents from incoming events and logs.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Commvault Instance to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Commvault QSDK Token**\n\n[Follow these instructions](https://documentation.commvault.com/2024e/essential/creating_access_token.html) to create an API Token.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the CommvaultSecurityIQ data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Commvault Endpoint URL and QSDK Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Commvault Security IQ data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CommvaultSecurityIQ-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the **Workspace ID**, **Workspace Key** 'and/or Other required fields' and click Next. \n4. 
Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Commvault Environment Endpoint URL"", ""description"": ""Make sure to follow the documentation and set the secret value in KeyVault""}, {""name"": ""Commvault QSDK Token"", ""description"": ""Make sure to follow the documentation and set the secret value in KeyVault""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Commvault%20Security%20IQ/Data%20Connectors/CommvaultSecurityIQ_API_AzureFunctionApp.json","true" +"","ContinuousDiagnostics&Mitigation","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContinuousDiagnostics%26Mitigation","azuresentinel","azure-sentinel-solution-continuousdiagnostics","2022-08-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"CommonSecurityLog","Contrast 
Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect","contrast_security","contrast_protect_azure_sentinel_solution","2021-10-20","","","Contrast Protect","Partner","https://docs.contrastsecurity.com/","","domains","ContrastProtect","Contrast Security","[Deprecated] Contrast Protect via Legacy Agent","Contrast Protect mitigates security threats in production applications with runtime protection and observability. Attack event results (blocked, probed, suspicious...) and other information can be sent to Microsoft Microsoft Sentinel to blend with security information from other systems.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure the Contrast Protect agent to forward events to syslog as described here: https://docs.contrastsecurity.com/en/output-to-syslog.html. Generate some attack events for your application.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect/Data%20Connectors/ContrastProtect.json","true" +"CommonSecurityLog","Contrast Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect","contrast_security","contrast_protect_azure_sentinel_solution","2021-10-20","","","Contrast Protect","Partner","https://docs.contrastsecurity.com/","","domains","ContrastProtectAma","Contrast Security","[Deprecated] Contrast Protect via AMA","Contrast Protect mitigates security threats in production applications with runtime protection and observability. Attack event results (blocked, probed, suspicious...) 
and other information can be sent to Microsoft Microsoft Sentinel to blend with security information from other systems.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure the Contrast Protect agent to forward events to syslog as described here: https://docs.contrastsecurity.com/en/output-to-syslog.html. Generate some attack events for your application."", ""instructions"": []}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect/Data%20Connectors/template_ContrastProtectAMA.json","true" +"ContrastADRIncident_CL","ContrastADR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR","contrast_security","contrast_adr_azure_sentinel_solution","2025-01-18","2025-01-18","","Contrast Security","Partner","https://support.contrastsecurity.com/hc/en-us","","domains","ContrastADR","Contrast Security","ContrastADR","The ContrastADR data connector provides the capability to ingest Contrast ADR attack events into Microsoft Sentinel using the ContrastADR Webhook. ContrastADR data connector can enrich the incoming webhook data with ContrastADR API enrichment calls.","[{""title"": """", ""description"": ""Use these Workspace id and primary key as shared key in azure function app"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method to automate deployment of the ContrastADR Data Connector using ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ContrastADR-azuredeploy)\n2. 
Provide the following parameters: Region, Function Name, LOG_ANALYTICS_SHARED_KEY, LOG_ANALYTICS_WORKSPACE_ID ""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR/Data%20Connectors/ContrastADR_API_FunctionApp.json","true" +"ContrastADR_CL","ContrastADR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR","contrast_security","contrast_adr_azure_sentinel_solution","2025-01-18","2025-01-18","","Contrast Security","Partner","https://support.contrastsecurity.com/hc/en-us","","domains","ContrastADR","Contrast Security","ContrastADR","The ContrastADR data connector provides the capability to ingest Contrast ADR attack events into Microsoft Sentinel using the ContrastADR Webhook. 
ContrastADR data connector can enrich the incoming webhook data with ContrastADR API enrichment calls.","[{""title"": """", ""description"": ""Use these Workspace id and primary key as shared key in azure function app"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method to automate deployment of the ContrastADR Data Connector using ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ContrastADR-azuredeploy)\n2. Provide the following parameters: Region, Function Name, LOG_ANALYTICS_SHARED_KEY, LOG_ANALYTICS_WORKSPACE_ID ""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR/Data%20Connectors/ContrastADR_API_FunctionApp.json","true" +"Corelight_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. 
Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_bacnet_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_capture_loss_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_cip_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_conn_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_conn_long_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_conn_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_corelight_burst_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_corelight_overall_capture_loss_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_corelight_profiling_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_datared_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_dce_rpc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_dga_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_dhcp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_dnp3_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_dns_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_dns_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_dpd_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_encrypted_dns_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_enip_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_enip_debug_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_enip_list_identity_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_etc_viz_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_files_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_files_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ftp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_generic_dns_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_generic_icmp_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_http2_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_http_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_http_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_icmp_specific_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_intel_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ipsec_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_irc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_iso_cotp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_kerberos_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_certs_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_devices_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_domains_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_hosts_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_names_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_remotes_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_services_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_users_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_local_subnets_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_local_subnets_dj_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_local_subnets_graphs_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_log4shell_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_modbus_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_mqtt_connect_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_mqtt_publish_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_mqtt_subscribe_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_mysql_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_notice_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ntlm_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ntp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ocsp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_openflow_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_packet_filter_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_pe_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_profinet_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_profinet_dce_rpc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_profinet_debug_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_radius_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_rdp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_reporter_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_rfb_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_s7comm_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_signatures_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_sip_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_smartpcap_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_smartpcap_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_smb_files_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_smb_mapping_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_smtp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_smtp_links_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_snmp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_socks_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_software_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_specific_dns_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ssh_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ssl_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ssl_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_stepping_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_stun_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_stun_nat_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_suricata_corelight_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_suricata_eve_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_suricata_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_suricata_zeek_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_syslog_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_tds_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_tds_rpc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_tds_sql_batch_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_traceroute_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_tunnel_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_unknown_smartpcap_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_util_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_vpn_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_weird_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_weird_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_weird_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_wireguard_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_x509_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_x509_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_zeek_doctor_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"PaloAltoCortexXDR_Alerts_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Audit_Agent_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Audit_Management_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Endpoints_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Incidents_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"CriblAccess_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. 
This gives you more security insight into your organization's data pipelines.","[{""title"": ""Installation and setup instructions for Cribl Stream for Microsoft Sentinel"", ""description"": ""Use the documentation from this Github repository and configure Cribl Stream using \n\nhttps://docs.cribl.io/stream/usecase-azure-workspace/""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" +"CriblAudit_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. 
This gives you more security insight into your organization's data pipelines.","[{""title"": ""Installation and setup instructions for Cribl Stream for Microsoft Sentinel"", ""description"": ""Use the documentation from this Github repository and configure Cribl Stream using \n\nhttps://docs.cribl.io/stream/usecase-azure-workspace/""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" +"CriblInternal_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. 
This gives you more security insight into your organization's data pipelines.","[{""title"": ""Installation and setup instructions for Cribl Stream for Microsoft Sentinel"", ""description"": ""Use the documentation from this Github repository and configure Cribl Stream using \n\nhttps://docs.cribl.io/stream/usecase-azure-workspace/""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" +"CriblUIAccess_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. 
This gives you more security insight into your organization's data pipelines.","[{""title"": ""Installation and setup instructions for Cribl Stream for Microsoft Sentinel"", ""description"": ""Use the documentation from this Github repository and configure Cribl Stream using \n\nhttps://docs.cribl.io/stream/usecase-azure-workspace/""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" +"CrowdStrikeAlerts","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. 
It supports DCR-based ingestion time transformations so that queries can run more efficiently.","[{""title"": ""Configuration steps for the CrowdStrike API"", ""description"": ""Follow the instructions below to obtain your CrowdStrike API credentials."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\nLog in to your CrowdStrike Console and navigate to the API section to copy your Base API URL.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve Client Credentials\nObtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api.us-2.crowdstrike.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Your Client ID"", ""type"": ""text"", ""name"": ""clientId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Your Client Secret"", ""type"": ""password"", ""name"": ""clientSecret"", ""validations"": {""required"": true}}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" +"CrowdStrikeDetections","CrowdStrike Falcon Endpoint 
Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","[{""title"": ""Configuration steps for the CrowdStrike API"", ""description"": ""Follow the instructions below to obtain your CrowdStrike API credentials."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\nLog in to your CrowdStrike Console and navigate to the API section to copy your Base API URL.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Retrieve Client Credentials\nObtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api.us-2.crowdstrike.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Your Client ID"", ""type"": ""text"", ""name"": ""clientId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Your Client Secret"", ""type"": ""password"", ""name"": ""clientSecret"", ""validations"": {""required"": true}}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" +"CrowdStrikeHosts","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. 
This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","[{""title"": ""Configuration steps for the CrowdStrike API"", ""description"": ""Follow the instructions below to obtain your CrowdStrike API credentials."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\nLog in to your CrowdStrike Console and navigate to the API section to copy your Base API URL.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve Client Credentials\nObtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api.us-2.crowdstrike.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Your Client ID"", ""type"": ""text"", ""name"": ""clientId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Your Client Secret"", ""type"": ""password"", ""name"": ""clientSecret"", ""validations"": {""required"": true}}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": 
false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" +"CrowdStrikeIncidents","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","[{""title"": ""Configuration steps for the CrowdStrike API"", ""description"": ""Follow the instructions below to obtain your CrowdStrike API credentials."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\nLog in to your CrowdStrike Console and navigate to the API section to copy your Base API URL.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Retrieve Client Credentials\nObtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api.us-2.crowdstrike.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Your Client ID"", ""type"": ""text"", ""name"": ""clientId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Your Client Secret"", ""type"": ""password"", ""name"": ""clientSecret"", ""validations"": {""required"": true}}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" +"CrowdStrikeVulnerabilities","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. 
This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","[{""title"": ""Configuration steps for the CrowdStrike API"", ""description"": ""Follow the instructions below to obtain your CrowdStrike API credentials."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\nLog in to your CrowdStrike Console and navigate to the API section to copy your Base API URL.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve Client Credentials\nObtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api.us-2.crowdstrike.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Your Client ID"", ""type"": ""text"", ""name"": ""clientId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Your Client Secret"", ""type"": ""password"", ""name"": ""clientSecret"", ""validations"": {""required"": true}}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": 
false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" +"ThreatIntelligenceIndicator","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconAdversaryIntelligence","CrowdStrike","CrowdStrike Falcon Adversary Intelligence ","The [CrowdStrike](https://www.crowdstrike.com/) Falcon Indicators of Compromise connector retrieves the Indicators of Compromise from the Falcon Intel API and uploads them [Microsoft Sentinel Threat Intel](https://learn.microsoft.com/en-us/azure/sentinel/understand-threat-intelligence).","[{""title"": """", ""description"": ""**STEP 1 - [Generate CrowdStrike API credentials](https://www.crowdstrike.com/blog/tech-center/get-access-falcon-apis/).**\n""}, {""title"": """", ""description"": ""Make sure 'Indicators (Falcon Intelligence)' scope has 'read' selected""}, {""title"": """", ""description"": ""**STEP 2 - [Register an Entra App](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app) with client secret.**\n""}, {""title"": """", ""description"": ""Provide the Entra App principal with 'Microsoft Sentinel Contributor' role assignment on the respective log analytics workspace. 
[How to assign roles on Azure](https://learn.microsoft.com/en-us/azure/role-based-access-control/role-assignments-portal).""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the CrowdStrike Falcon Indicator of Compromise connector, have the Workspace ID (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the CrowdStrike Falcon Adversary Intelligence connector connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdStrikeFalconAdversaryIntelligence-azuredeploy)\n2. Provide the following parameters: CrowdStrikeClientId, CrowdStrikeClientSecret, CrowdStrikeBaseUrl, WorkspaceId, TenantId, Indicators, AadClientId, AadClientSecret, LookBackDays""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CrowdStrike Falcon Adversary Intelligence connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdStrikeFalconAdversaryIntelligence-Functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CrowdStrikeFalconIOCXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCROWDSTRIKE_CLIENT_ID\n\t\tCROWDSTRIKE_CLIENT_SECRET\n\t\tCROWDSTRIKE_BASE_URL\n\t\tTENANT_ID\n\t\tINDICATORS\n\t\tWorkspaceKey\n\t\tAAD_CLIENT_ID\n\t\tAAD_CLIENT_SECRET \n\t\tLOOK_BACK_DAYS \n\t\tWORKSPACE_ID \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""CrowdStrike API Client ID and Client Secret"", ""description"": ""**CROWDSTRIKE_CLIENT_ID**, **CROWDSTRIKE_CLIENT_SECRET**, **CROWDSTRIKE_BASE_URL**. 
CrowdStrike credentials must have Indicators (Falcon Intelligence) read scope.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeFalconAdversaryIntelligence/CrowdStrikeFalconAdversaryIntelligence_FunctionApp.json","true" +"CommonSecurityLog","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconEndpointProtection","CrowdStrike","[Deprecated] CrowdStrike Falcon Endpoint Protection via Legacy Agent","The [CrowdStrike Falcon Endpoint Protection](https://www.crowdstrike.com/endpoint-security-products/) connector allows you to easily connect your CrowdStrike Falcon Event Stream with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization's endpoints and improves your security operation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Crowd Strike Falcon Endpoint Protection and load the function code or click [here](https://aka.ms/sentinel-crowdstrikefalconendpointprotection-parser), on the second line of the query, enter the hostname(s) of your CrowdStrikeFalcon device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward CrowdStrike Falcon Event Stream logs to a Syslog agent"", ""description"": ""Deploy the CrowdStrike Falcon SIEM Collector to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://www.crowdstrike.com/blog/tech-center/integrate-with-your-siem/) to deploy the SIEM Collector and forward syslog\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/Connector_Syslog_CrowdStrikeFalconEndpointProtection.json","true" +"CommonSecurityLog","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconEndpointProtectionAma","CrowdStrike","[Deprecated] CrowdStrike Falcon Endpoint Protection via AMA","The [CrowdStrike Falcon Endpoint Protection](https://www.crowdstrike.com/endpoint-security-products/) connector allows you to easily connect your CrowdStrike Falcon Event Stream with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization's endpoints and improves your security operation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Crowd Strike Falcon Endpoint Protection and load the function code or click [here](https://aka.ms/sentinel-crowdstrikefalconendpointprotection-parser), on the second line of the query, enter the hostname(s) of your CrowdStrikeFalcon device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward CrowdStrike Falcon Event Stream logs to a Syslog agent"", ""description"": ""Deploy the CrowdStrike Falcon SIEM Collector to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. 
[Follow these instructions](https://www.crowdstrike.com/blog/tech-center/integrate-with-your-siem/) to deploy the SIEM Collector and forward syslog\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/template_CrowdStrikeFalconEndpointProtectionAma.json","true" +"CrowdStrike_Additional_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_Audit_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_Auth_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_DNS_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_File_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_Network_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_Process_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_Registry_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_Secondary_Data_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_User_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" +"ASimAuditEventLogs","CrowdStrike 
Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Stpe 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimAuthenticationEventLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Stpe 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimAuthenticationEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Stpe 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimDnsActivityLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Stpe 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimFileEventLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Stpe 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimFileEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimNetworkSessionLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimProcessEventLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimProcessEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimRegistryEventLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Stpe 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimRegistryEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Stpe 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimUserManagementActivityLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Stpe 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimUserManagementLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Stpe 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"CrowdStrike_Additional_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Stpe 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"CrowdStrike_Secondary_Data_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Stpe 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"CommonSecurityLog","CyberArk Enterprise Password Vault (EPV) Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events","cyberark","cyberark_epv_events_mss","2022-05-02","","","Cyberark","Partner","https://www.cyberark.com/services-support/technical-support/","","domains","CyberArk","Cyber-Ark","[Deprecated] CyberArk Enterprise Password Vault (EPV) Events via Legacy Agent","CyberArk Enterprise Password Vault generates an xml Syslog message for every action taken against the Vault. The EPV will send the xml messages through the Microsoft Sentinel.xsl translator to be converted into CEF standard format and sent to a syslog staging server of your choice (syslog-ng, rsyslog). 
The Log Analytics agent installed on your syslog staging server will import the messages into Microsoft Log Analytics. Refer to the [CyberArk documentation](https://docs.cyberark.com/Product-Doc/OnlineHelp/PAS/Latest/en/Content/PASIMP/DV-Integrating-with-SIEM-Applications.htm) for more guidance on SIEM integrations.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python installed on your machine.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""On the EPV configure the dbparm.ini to send Syslog messages in CEF format to the proxy machine. 
Make sure you to send the logs to port 514 TCP on the machines IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python installed on your machine using the following command: python -version\n\n>\n\n> 2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machines security according to your organizations security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events/Data%20Connectors/CyberArk%20Data%20Connector.json","true" +"CommonSecurityLog","CyberArk Enterprise Password Vault (EPV) Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events","cyberark","cyberark_epv_events_mss","2022-05-02","","","Cyberark","Partner","https://www.cyberark.com/services-support/technical-support/","","domains","CyberArkAma","Cyber-Ark","[Deprecated] CyberArk Privilege Access Manager (PAM) Events via AMA","CyberArk Privilege Access Manager generates an xml Syslog message for every action taken against the Vault. The PAM will send the xml messages through the Microsoft Sentinel.xsl translator to be converted into CEF standard format and sent to a syslog staging server of your choice (syslog-ng, rsyslog). The Log Analytics agent installed on your syslog staging server will import the messages into Microsoft Log Analytics. Refer to the [CyberArk documentation](https://docs.cyberark.com/privilege-cloud-standard/Latest/en/Content/Privilege%20Cloud/privCloud-connect-siem.htm) for more guidance on SIEM integrations.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. 
Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""On the EPV configure the dbparm.ini to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python installed on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machines security according to your organizations security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events/Data%20Connectors/template_CyberArkAMA.json","true" +"CyberArkAudit","CyberArkAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit","cyberark","cyberark_audit_sentinel","2024-03-01","","","CyberArk Support","Partner","https://www.cyberark.com/services-support/technical-support-contact/","","domains","CyberArkAudit","CyberArk","CyberArkAudit","The [CyberArk Audit](https://docs.cyberark.com/Audit/Latest/en/Content/Resources/_TopNav/cc_Home.htm) data connector provides the capability to retrieve security event logs of the CyberArk Audit service and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**NOTE:** API authorization key(s) or token(s) are securely stored in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the CyberArk Audit SIEM Integration**\n\n Follow the [instructions](https://docs.cyberark.com/audit/latest/en/Content/Audit/isp_Microsoft_Sentinel.htm?tocpath=SIEM%20integrations%7C_____3) to obtain connection details and credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the CyberArk Audit data connector, have the Workspace Name and Workspace Location (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceLocation""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the CyberArk Audit data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CyberArkAuditAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **CyberArkAuditUsername**, **CyberArkAuditPassword**, **CyberArkAuditServerURL** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CyberArk Audit data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CyberArkAudit-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CyberArkXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCyberArkAuditUsername\n\t\tCyberArkAuditPassword\n\t\tCyberArkAuditServerURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Audit REST API Connections details and Credentials"", ""description"": ""**OauthUsername**, **OauthPassword**, **WebAppID**, **AuditApiKey**, **IdentityEndpoint** and **AuditApiBaseUrl** are required for making API calls.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit/Data%20Connectors/CyberArkAudit_API_FunctionApp.json","true" +"CyberArk_AuditEvents_CL","CyberArkAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit","cyberark","cyberark_audit_sentinel","2024-03-01","","","CyberArk Support","Partner","https://www.cyberark.com/services-support/technical-support-contact/","","domains","CyberArkAudit","CyberArk","CyberArkAudit","The [CyberArk Audit](https://docs.cyberark.com/Audit/Latest/en/Content/Resources/_TopNav/cc_Home.htm) data connector provides the capability to retrieve security event logs of the CyberArk Audit service and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**NOTE:** API authorization key(s) or token(s) are securely stored in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the CyberArk Audit SIEM Integration**\n\n Follow the [instructions](https://docs.cyberark.com/audit/latest/en/Content/Audit/isp_Microsoft_Sentinel.htm?tocpath=SIEM%20integrations%7C_____3) to obtain connection details and credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the CyberArk Audit data connector, have the Workspace Name and Workspace Location (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceLocation""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the CyberArk Audit data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CyberArkAuditAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **CyberArkAuditUsername**, **CyberArkAuditPassword**, **CyberArkAuditServerURL** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CyberArk Audit data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CyberArkAudit-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CyberArkXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCyberArkAuditUsername\n\t\tCyberArkAuditPassword\n\t\tCyberArkAuditServerURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Audit REST API Connections details and Credentials"", ""description"": ""**OauthUsername**, **OauthPassword**, **WebAppID**, **AuditApiKey**, **IdentityEndpoint** and **AuditApiBaseUrl** are required for making API calls.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit/Data%20Connectors/CyberArkAudit_API_FunctionApp.json","true" +"","CyberArkEPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkEPM","cyberark","cybr_epm_sentinel","2022-04-10","","","CyberArk Support","Partner","https://www.cyberark.com/services-support/technical-support-contact/","","domains","","","","","","","","false" +"","CybersecurityMaturityModelCertification(CMMC)2.0","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CybersecurityMaturityModelCertification%28CMMC%292.0","azuresentinel","azure-sentinel-solution-cmmcv2","2022-01-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"CyberSixgill_Alerts_CL","Cybersixgill-Actionable-Alerts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cybersixgill-Actionable-Alerts","cybersixgill1657701397011","azure-sentinel-cybersixgill-actionable-alerts","2023-02-27","2024-09-24","","Cybersixgill","Partner","https://www.cybersixgill.com/","","domains","CybersixgillActionableAlerts","Cybersixgill","Cybersixgill Actionable Alerts","Actionable alerts provide customized alerts based on configured assets","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cybersixgill API to pull Alerts into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cybersixgill Actionable Alerts data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/senitnel-cybersixgill-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Client ID**, **Client Secret**, **TimeInterval** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cybersixgill Actionable Alerts data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cybersixgill-Actionable-Alerts/Data%20Connectors/CybersixgillAlerts.zip?raw=true) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CybersixgillAlertsXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. 
In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tClientID\n\t\tClientSecret\n\t\tPolling\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Client_ID** and **Client_Secret** are required for making API calls.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cybersixgill-Actionable-Alerts/Data%20Connectors/Cybersixgill_FunctionApp.json","true" +"","Cyble Vision","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyble%20Vision","cybleinc1737472004964","cybleinc1737472004964-azure-sentinel-offerid","2025-05-05","","","Cyble Support","Partner","https://cyble.com/talk-to-sales/","","domains","","","","","","","","false" +"SecurityEvent","Cyborg Security HUNTER","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyborg%20Security%20HUNTER","cyborgsecurityinc1689265652101","azure-sentinel-solution-cyborgsecurity-hunter","2023-07-03","2023-09-22","","Cyborg Security","Partner","https://hunter.cyborgsecurity.io/customer-support","","domains","CyborgSecurity_HUNTER","Cyborg Security","Cyborg Security HUNTER Hunt Packages","Cyborg Security is a leading provider of advanced threat hunting solutions, with a mission to empower organizations with cutting-edge technology and collaborative tools to proactively detect and respond to cyber threats. Cyborg Security's flagship offering, the HUNTER Platform, combines powerful analytics, curated threat hunting content, and comprehensive hunt management capabilities to create a dynamic ecosystem for effective threat hunting operations.

Follow the steps to gain access to Cyborg Security's Community and setup the 'Open in Tool' capabilities in the HUNTER Platform.","[{""instructions"": [{""parameters"": {""text"": ""Use the following link to find your Azure Tentant ID How to find your Azure Active Directory tenant ID"", ""visible"": true, ""inline"": true}, ""type"": ""InfoMessage""}, {""parameters"": {""fillWith"": [""workspaceName""], ""label"": ""ResourceGroupName & WorkspaceName"", ""value"": ""{0}""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""WorkspaceID"", ""value"": ""{0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""1. Sign up for Cyborg Security's HUNTER Community Account"", ""description"": ""Cyborg Security offers Community Memebers access to a subset of the Emerging Threat Collections and hunt packages.\n\nCreate a Free Commuinity Account to get access to Cyborg Security's Hunt Packages: [Sign Up Now!](https://www.cyborgsecurity.com/user-account-creation/)""}, {""title"": ""2. Configure the Open in Tool Feature"", ""description"": ""\n\n1. Navigate to the [Environment](https://hunter.cyborgsecurity.io/environment) section of the HUNTER Platform.\n2. Fill in te **Root URI** of your environment in the section labeled **Microsoft Sentinel**. Replace the with the IDs and Names of your Subscription, Resource Groups and Workspaces.\n\n https[]()://portal.azure.com#@**AzureTenantID**/blade/Microsoft_OperationsManagementSuite_Workspace/Logs.ReactView/resourceId/%2Fsubscriptions%2F**AzureSubscriptionID**%2Fresourcegroups%2F**ResourceGroupName**%2Fproviders%2Fmicrosoft.operationalinsights%2Fworkspaces%2F<**WorkspaceName**>/\n3. Click **Save**.""}, {""title"": ""3. 
Execute a HUNTER hunt pacakge in Microsoft Sentinel"", ""description"": ""\n\nIdentify a Cyborg Security HUNTER hunt package to deploy and use the **Open In Tool** button to quickly open Microsoft Sentinel and stage the hunting content.\n\n![image](https://7924572.fs1.hubspotusercontent-na1.net/hubfs/7924572/HUNTER/Screenshots/openintool-ms-new.png)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyborg%20Security%20HUNTER/Data%20Connectors/CyborgSecurity_HUNTER.json","true" +"CyeraAssets_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Microsoft Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""Connect to your Cyera DSPM tenenant via Personal Access Tokens"", ""instructions"": [{""parameters"": {""label"": ""Cyera Personal Access Token Client ID"", ""name"": ""clientId"", ""placeholder"": ""client_id"", ""type"": ""text""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Cyera Personal Access Token Secret Key"", ""name"": ""clientSecret"", ""placeholder"": ""secret_key"", ""type"": ""password""}, ""type"": ""Textbox""}, {""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Cyera DSPM Authentication""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""Read and Write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""action"": false, ""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" +"CyeraAssets_MS_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Microsoft Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""Connect to your Cyera DSPM tenenant via Personal Access Tokens"", ""instructions"": [{""parameters"": {""label"": ""Cyera Personal Access Token Client ID"", ""name"": ""clientId"", ""placeholder"": ""client_id"", ""type"": ""text""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Cyera Personal Access Token Secret Key"", ""name"": ""clientSecret"", ""placeholder"": ""secret_key"", ""type"": ""password""}, ""type"": ""Textbox""}, {""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Cyera DSPM Authentication""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""Read and Write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""action"": false, ""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" +"CyeraClassifications_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Microsoft Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""Connect to your Cyera DSPM tenenant via Personal Access Tokens"", ""instructions"": [{""parameters"": {""label"": ""Cyera Personal Access Token Client ID"", ""name"": ""clientId"", ""placeholder"": ""client_id"", ""type"": ""text""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Cyera Personal Access Token Secret Key"", ""name"": ""clientSecret"", ""placeholder"": ""secret_key"", ""type"": ""password""}, ""type"": ""Textbox""}, {""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Cyera DSPM Authentication""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""Read and Write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""action"": false, ""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" +"CyeraIdentities_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Microsoft Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""Connect to your Cyera DSPM tenenant via Personal Access Tokens"", ""instructions"": [{""parameters"": {""label"": ""Cyera Personal Access Token Client ID"", ""name"": ""clientId"", ""placeholder"": ""client_id"", ""type"": ""text""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Cyera Personal Access Token Secret Key"", ""name"": ""clientSecret"", ""placeholder"": ""secret_key"", ""type"": ""password""}, ""type"": ""Textbox""}, {""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Cyera DSPM Authentication""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""Read and Write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""action"": false, ""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" +"CyeraIssues_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Microsoft Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""Connect to your Cyera DSPM tenenant via Personal Access Tokens"", ""instructions"": [{""parameters"": {""label"": ""Cyera Personal Access Token Client ID"", ""name"": ""clientId"", ""placeholder"": ""client_id"", ""type"": ""text""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Cyera Personal Access Token Secret Key"", ""name"": ""clientSecret"", ""placeholder"": ""secret_key"", ""type"": ""password""}, ""type"": ""Textbox""}, {""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Cyera DSPM Authentication""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""Read and Write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""action"": false, ""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" +"CyeraAssets_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Microsoft Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft 
Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.","[{""title"": ""Note"", ""description"": "">**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). Function runtime and data egress may incur charges. See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/).""}, {""title"": ""Optional Step"", ""description"": "">**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. 
See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references).""}, {""title"": ""STEP 1 \u2014 Prepare Cyera API Access"", ""description"": ""1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\\n2) Note **API Base URL**, **Client ID**, and **Client Secret**.""}, {""title"": ""STEP 2 \u2014 Choose ONE deployment option"", ""description"": ""> Before deploying, have these values handy:"", ""instructions"": [{""parameters"": {""fillWith"": [""CyeraDSPMConnector""], ""label"": ""Cyera Function Connector Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""https://api.cyera.io""], ""label"": ""Cyera Base URL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraClientID""], ""label"": ""Cyera Personal Access Token Client ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraSecret""], ""label"": ""Cyera Personal Access Token Secret""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri)\n2. Select the preferred **FunctionName** and **Workspace Name**. \n3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. 
\n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 \u2014 Manual Deployment"", ""description"": ""Follow the [install pack\u2019s step-by-step guide]({{userguide-url}}.\\n\\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dce-`, and data collection endpoint with format `sentinel-dcr-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\\n2) Deploy the Azure Function from the repo`s Function folder (Timer-trigger; schedule typically 5\u201315 minutes).\\n3) Configure Function App settings:\\n - `CyeraBaseUrl` \u2014 Cyera API Base URL\\n - `CyeraClientId` \u2014 Client ID (PAT)\\n - `CyeraSecret` \u2014 Client Secret (PAT)\\n - `DCR_IMMUTABLE_ID` \u2014 DCR immutable ID\\n - `DCE_ENDPOINT` \u2014 Logs ingestion endpoint URL\\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\\n4) Save and Start the Function App.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" 
+"CyeraAssets_MS_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Microsoft Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.","[{""title"": ""Note"", ""description"": "">**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). Function runtime and data egress may incur charges. See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/).""}, {""title"": ""Optional Step"", ""description"": "">**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. 
See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references).""}, {""title"": ""STEP 1 \u2014 Prepare Cyera API Access"", ""description"": ""1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\\n2) Note **API Base URL**, **Client ID**, and **Client Secret**.""}, {""title"": ""STEP 2 \u2014 Choose ONE deployment option"", ""description"": ""> Before deploying, have these values handy:"", ""instructions"": [{""parameters"": {""fillWith"": [""CyeraDSPMConnector""], ""label"": ""Cyera Function Connector Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""https://api.cyera.io""], ""label"": ""Cyera Base URL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraClientID""], ""label"": ""Cyera Personal Access Token Client ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraSecret""], ""label"": ""Cyera Personal Access Token Secret""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri)\n2. Select the preferred **FunctionName** and **Workspace Name**. \n3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. 
\n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 \u2014 Manual Deployment"", ""description"": ""Follow the [install pack\u2019s step-by-step guide]({{userguide-url}}.\\n\\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dce-`, and data collection endpoint with format `sentinel-dcr-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\\n2) Deploy the Azure Function from the repo`s Function folder (Timer-trigger; schedule typically 5\u201315 minutes).\\n3) Configure Function App settings:\\n - `CyeraBaseUrl` \u2014 Cyera API Base URL\\n - `CyeraClientId` \u2014 Client ID (PAT)\\n - `CyeraSecret` \u2014 Client Secret (PAT)\\n - `DCR_IMMUTABLE_ID` \u2014 DCR immutable ID\\n - `DCE_ENDPOINT` \u2014 Logs ingestion endpoint URL\\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\\n4) Save and Start the Function App.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" 
+"CyeraClassifications_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Microsoft Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.","[{""title"": ""Note"", ""description"": "">**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). Function runtime and data egress may incur charges. 
See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/).""}, {""title"": ""Optional Step"", ""description"": "">**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references).""}, {""title"": ""STEP 1 \u2014 Prepare Cyera API Access"", ""description"": ""1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\\n2) Note **API Base URL**, **Client ID**, and **Client Secret**.""}, {""title"": ""STEP 2 \u2014 Choose ONE deployment option"", ""description"": ""> Before deploying, have these values handy:"", ""instructions"": [{""parameters"": {""fillWith"": [""CyeraDSPMConnector""], ""label"": ""Cyera Function Connector Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""https://api.cyera.io""], ""label"": ""Cyera Base URL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraClientID""], ""label"": ""Cyera Personal Access Token Client ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraSecret""], ""label"": ""Cyera Personal Access Token Secret""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri)\n2. Select the preferred **FunctionName** and **Workspace Name**. \n3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 \u2014 Manual Deployment"", ""description"": ""Follow the [install pack\u2019s step-by-step guide]({{userguide-url}}.\\n\\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dce-`, and data collection endpoint with format `sentinel-dcr-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\\n2) Deploy the Azure Function from the repo`s Function folder (Timer-trigger; schedule typically 5\u201315 minutes).\\n3) Configure Function App settings:\\n - `CyeraBaseUrl` \u2014 Cyera API Base URL\\n - `CyeraClientId` \u2014 Client ID (PAT)\\n - `CyeraSecret` \u2014 Client Secret (PAT)\\n - `DCR_IMMUTABLE_ID` \u2014 DCR immutable ID\\n - `DCE_ENDPOINT` \u2014 Logs ingestion endpoint URL\\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\\n4) Save and Start the Function App.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": 
""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" +"CyeraIdentities_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Microsoft Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.","[{""title"": ""Note"", ""description"": "">**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). 
Function runtime and data egress may incur charges. See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/).""}, {""title"": ""Optional Step"", ""description"": "">**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references).""}, {""title"": ""STEP 1 \u2014 Prepare Cyera API Access"", ""description"": ""1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\\n2) Note **API Base URL**, **Client ID**, and **Client Secret**.""}, {""title"": ""STEP 2 \u2014 Choose ONE deployment option"", ""description"": ""> Before deploying, have these values handy:"", ""instructions"": [{""parameters"": {""fillWith"": [""CyeraDSPMConnector""], ""label"": ""Cyera Function Connector Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""https://api.cyera.io""], ""label"": ""Cyera Base URL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraClientID""], ""label"": ""Cyera Personal Access Token Client ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraSecret""], ""label"": ""Cyera Personal Access Token Secret""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri)\n2. Select the preferred **FunctionName** and **Workspace Name**. \n3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 \u2014 Manual Deployment"", ""description"": ""Follow the [install pack\u2019s step-by-step guide]({{userguide-url}}.\\n\\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dce-`, and data collection endpoint with format `sentinel-dcr-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\\n2) Deploy the Azure Function from the repo`s Function folder (Timer-trigger; schedule typically 5\u201315 minutes).\\n3) Configure Function App settings:\\n - `CyeraBaseUrl` \u2014 Cyera API Base URL\\n - `CyeraClientId` \u2014 Client ID (PAT)\\n - `CyeraSecret` \u2014 Client Secret (PAT)\\n - `DCR_IMMUTABLE_ID` \u2014 DCR immutable ID\\n - `DCE_ENDPOINT` \u2014 Logs ingestion endpoint URL\\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\\n4) Save and Start the Function App.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": 
""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" +"CyeraIssues_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Microsoft Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.","[{""title"": ""Note"", ""description"": "">**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). Function runtime and data egress may incur charges. 
See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/).""}, {""title"": ""Optional Step"", ""description"": "">**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references).""}, {""title"": ""STEP 1 \u2014 Prepare Cyera API Access"", ""description"": ""1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\\n2) Note **API Base URL**, **Client ID**, and **Client Secret**.""}, {""title"": ""STEP 2 \u2014 Choose ONE deployment option"", ""description"": ""> Before deploying, have these values handy:"", ""instructions"": [{""parameters"": {""fillWith"": [""CyeraDSPMConnector""], ""label"": ""Cyera Function Connector Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""https://api.cyera.io""], ""label"": ""Cyera Base URL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraClientID""], ""label"": ""Cyera Personal Access Token Client ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraSecret""], ""label"": ""Cyera Personal Access Token Secret""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri)\n2. Select the preferred **FunctionName** and **Workspace Name**. \n3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 \u2014 Manual Deployment"", ""description"": ""Follow the [install pack\u2019s step-by-step guide]({{userguide-url}}.\\n\\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dce-`, and data collection endpoint with format `sentinel-dcr-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\\n2) Deploy the Azure Function from the repo`s Function folder (Timer-trigger; schedule typically 5\u201315 minutes).\\n3) Configure Function App settings:\\n - `CyeraBaseUrl` \u2014 Cyera API Base URL\\n - `CyeraClientId` \u2014 Client ID (PAT)\\n - `CyeraSecret` \u2014 Client Secret (PAT)\\n - `DCR_IMMUTABLE_ID` \u2014 DCR immutable ID\\n - `DCE_ENDPOINT` \u2014 Logs ingestion endpoint URL\\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\\n4) Save and Start the Function App.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": 
""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" +"CyfirmaASCertificatesAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" +"CyfirmaASCloudWeaknessAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" +"CyfirmaASConfigurationAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. 
This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" +"CyfirmaASDomainIPReputationAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA 
Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" +"CyfirmaASDomainIPVulnerabilityAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" +"CyfirmaASOpenPortsAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. 
This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" +"CyfirmaBIDomainITAssetAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA 
Brand Intelligence","","[{""title"": ""CYFIRMA Brand Intelligence"", ""description"": ""Connect to CYFIRMA Brand Intelligence to ingest alerts data into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This enhances performance and efficiency by eliminating the need for query-time parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" +"CyfirmaBIExecutivePeopleAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","[{""title"": ""CYFIRMA Brand Intelligence"", ""description"": ""Connect to CYFIRMA Brand Intelligence to ingest alerts data into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. 
This enhances performance and efficiency by eliminating the need for query-time parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" +"CyfirmaBIMaliciousMobileAppsAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","[{""title"": ""CYFIRMA Brand Intelligence"", ""description"": ""Connect to CYFIRMA Brand Intelligence to ingest alerts data into 
Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This enhances performance and efficiency by eliminating the need for query-time parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" +"CyfirmaBIProductSolutionAlerts_CL","Cyfirma Brand 
Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","[{""title"": ""CYFIRMA Brand Intelligence"", ""description"": ""Connect to CYFIRMA Brand Intelligence to ingest alerts data into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This enhances performance and efficiency by eliminating the need for query-time parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" +"CyfirmaBISocialHandlersAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","[{""title"": ""CYFIRMA Brand Intelligence"", ""description"": ""Connect to CYFIRMA Brand Intelligence to ingest alerts data into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. 
This enhances performance and efficiency by eliminating the need for query-time parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" +"CyfirmaCompromisedAccounts_CL","Cyfirma Compromised Accounts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Compromised%20Accounts","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirmacompromisedaccounts","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCompromisedAccountsDataConnector","Microsoft","CYFIRMA Compromised Accounts","The CYFIRMA Compromised Accounts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. 
Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR/DeTCT API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Compromised Accounts"", ""description"": ""The CYFIRMA Compromised Accounts Data Connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR/DeTCT API to retrieve logs. Additionally, it supports DCR-based ingestion time transformations, which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""Setting it to true returns only data added since the last API call, while false returns all available data.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": 
""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Compromised%20Accounts/Data%20Connectors/CyfirmaCompromisedAccounts_ccp/CyfirmaCompAcc_DataConnectorDefinition.json","true" +"CyfirmaCampaigns_CL","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Cyber Intelligence"", ""description"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""descriptionMarkdown"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pull all IoC's Or Tailored IoC's"", ""placeholder"": ""All IoC's or Tailored IoC's"", ""type"": ""text"", ""name"": ""apiAll"", ""defaultValue"": ""false"", ""description"": ""Set to true to pull all IoC's, set to false to pull tailoried IoC's""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""defaultValue"": ""false"", ""description"": ""Setting it to true returns only data added since the last API call, while false returns data from the last 24 hours.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Recommended Actions"", ""placeholder"": ""Recommended Action can be any one of:All/Monitor/Block"", ""type"": ""text"", ""name"": ""recommendedActions"", ""defaultValue"": ""All"", ""description"": ""Recommended Action can be any one of:All/Monitor/Block""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Threat Actor Associated"", ""placeholder"": ""Is any Threat Actor Associated with the IoC's"", ""type"": ""text"", ""name"": ""isThreatActorExists"", ""defaultValue"": ""false"", ""description"": ""Is any Threat Actor Associated with the IoC's""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" +"CyfirmaIndicators_CL","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Cyber Intelligence"", ""description"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""descriptionMarkdown"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. 
The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pull all IoC's Or Tailored IoC's"", ""placeholder"": ""All IoC's or Tailored IoC's"", ""type"": ""text"", ""name"": ""apiAll"", ""defaultValue"": ""false"", ""description"": ""Set to true to pull all IoC's, set to false to pull tailoried IoC's""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""defaultValue"": ""false"", ""description"": ""Setting it to true returns only data added since the last API call, while false returns data from the last 24 hours.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Recommended Actions"", ""placeholder"": ""Recommended Action can be any one of:All/Monitor/Block"", ""type"": ""text"", ""name"": ""recommendedActions"", ""defaultValue"": ""All"", ""description"": ""Recommended Action can be any one of:All/Monitor/Block""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Threat Actor Associated"", ""placeholder"": ""Is any Threat Actor Associated with the IoC's"", ""type"": ""text"", ""name"": ""isThreatActorExists"", ""defaultValue"": ""false"", ""description"": ""Is any Threat Actor Associated with the IoC's""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": 
{""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" +"CyfirmaMalware_CL","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Cyber Intelligence"", ""description"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""descriptionMarkdown"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pull all IoC's Or Tailored IoC's"", ""placeholder"": ""All IoC's or Tailored IoC's"", ""type"": ""text"", ""name"": ""apiAll"", ""defaultValue"": ""false"", ""description"": ""Set to true to pull all IoC's, set to false to pull tailoried IoC's""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""defaultValue"": ""false"", ""description"": ""Setting it to true returns only data added since the last API call, while false returns data from the last 24 hours.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Recommended Actions"", ""placeholder"": ""Recommended Action can be any one of:All/Monitor/Block"", ""type"": ""text"", ""name"": ""recommendedActions"", ""defaultValue"": ""All"", ""description"": ""Recommended Action can be any one of:All/Monitor/Block""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Threat Actor Associated"", ""placeholder"": ""Is any Threat Actor Associated 
with the IoC's"", ""type"": ""text"", ""name"": ""isThreatActorExists"", ""defaultValue"": ""false"", ""description"": ""Is any Threat Actor Associated with the IoC's""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" +"CyfirmaThreatActors_CL","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Cyber Intelligence"", ""description"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. 
The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""descriptionMarkdown"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pull all IoC's Or Tailored IoC's"", ""placeholder"": ""All IoC's or Tailored IoC's"", ""type"": ""text"", ""name"": ""apiAll"", ""defaultValue"": ""false"", ""description"": ""Set to true to pull all IoC's, set to false to pull tailoried IoC's""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""defaultValue"": ""false"", ""description"": ""Setting it to true returns only data added since the last API call, while false returns data from the last 24 hours.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Recommended Actions"", ""placeholder"": ""Recommended Action can be any one of:All/Monitor/Block"", ""type"": ""text"", ""name"": ""recommendedActions"", ""defaultValue"": ""All"", ""description"": ""Recommended Action can be 
any one of:All/Monitor/Block""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Threat Actor Associated"", ""placeholder"": ""Is any Threat Actor Associated with the IoC's"", ""type"": ""text"", ""name"": ""isThreatActorExists"", ""defaultValue"": ""false"", ""description"": ""Is any Threat Actor Associated with the IoC's""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" +"CyfirmaDBWMDarkWebAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" +"CyfirmaDBWMPhishingAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. 
This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" +"CyfirmaDBWMRansomwareAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API 
into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" +"CyfirmaSPEConfidentialFilesAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. 
This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" +"CyfirmaSPEPIIAndCIIAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API 
into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" +"CyfirmaSPESocialThreatAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. 
This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" +"CyfirmaSPESourceCodeAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API 
into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" +"CyfirmaVulnerabilities_CL","Cyfirma Vulnerabilities Intel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Vulnerabilities%20Intel","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-vulnerabilities","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaVulnerabilitiesIntelDC","Microsoft","CYFIRMA Vulnerabilities Intelligence","The CYFIRMA Vulnerabilities Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the CYFIRMA API's to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Vulnerabilities Intelligence"", ""description"": ""This connector provides the Vulnerabilities logs from CYFIRMA Vulnerabilities Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""descriptionMarkdown"": ""This connector provides the Vulnerabilities logs from CYFIRMA Vulnerabilities Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""defaultValue"": ""false"", ""description"": ""API Delta: If true (default), returns data since the last call; if false or unspecified, returns data from the last 24 hours.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Vendor-Associated Vulnerabilities"", ""placeholder"": """", ""type"": ""text"", ""name"": ""isVendor"", ""defaultValue"": ""false"", ""description"": ""The value for Vendor-Associated Vulnerabilities can be either true or false.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Product-Associated Vulnerabilities"", ""placeholder"": """", ""type"": ""text"", ""name"": ""isProduct"", ""defaultValue"": ""false"", ""description"": ""The value for Product-Associated Vulnerabilities can be either true or false.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Product with Version-Associated Vulnerabilities"", ""placeholder"": """", ""type"": ""text"", ""name"": ""isVersion"", 
""defaultValue"": ""false"", ""description"": ""The value for Version-Associated Vulnerabilities can be either true or false.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Vulnerabilities%20Intel/Data%20Connectors/CyfirmaVulnerabilitiesIntel_ccp/CyfirmaVulnerabilities_DataConnectorDefinition.json","true" +"CynerioEvent_CL","Cynerio","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cynerio","cynerio1681887657820","cynerio-medical-device-security-sentinel-connector","2023-03-29","2023-03-29","","Cynerio","Partner","https://cynerio.com","","domains","CynerioSecurityEvents","Cynerio","Cynerio Security Events","The [Cynerio](https://www.cynerio.com/) connector allows you to easily connect your Cynerio Security Events with Microsoft Sentinel, to view IDS Events. This gives you more insight into your organization network security posture and improves your security operation capabilities. ","[{""title"": ""Configure and connect Cynerio"", ""description"": ""Cynerio can integrate with and export events directly to Microsoft Sentinel via Azure Server. Follow these steps to establish integration:\n\n1. In the Cynerio console, go to Settings > Integrations tab (default), and click on the **+Add Integration** button at the top right.\n\n2. Scroll down to the **SIEM** section.\n\n3. On the Microsoft Sentinel card, click the Connect button.\n\n4. The Integration Details window opens. 
Use the parameters below to fill out the form and set up the connection."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cynerio/Data%20Connectors/Cynerio_Connector.json","true" +"","Cyware","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyware","cywarelabsinc1709256751930","microsoft-sentinel-solution-cyware","2024-03-18","2024-03-18","","Cyware","Partner","","","domains","","","","","","","","false" +"","DEV-0537DetectionandHunting","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DEV-0537DetectionandHunting","azuresentinel","azure-sentinel-solution-DEV-0537DetectionandHunting","2022-04-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","DNS Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DNS%20Essentials","azuresentinel","azure-sentinel-solution-dns-domain","2023-01-14","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"darktrace_model_alerts_CL","Darktrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Darktrace","darktrace1655286944672","darktrace_for_sentinel","2022-05-02","","","Darktrace","Partner","https://www.darktrace.com/en/contact/","","domains","DarktraceRESTConnector","Darktrace","Darktrace Connector for Microsoft Sentinel REST API","The Darktrace REST API connector pushes real-time events from Darktrace to Microsoft Sentinel and is designed to be used with the Darktrace Solution for Sentinel. The connector writes logs to a custom log table titled ""darktrace_model_alerts_CL""; Model Breaches, AI Analyst Incidents, System Alerts and Email Alerts can be ingested - additional filters can be set up on the Darktrace System Configuration page. Data is pushed to Sentinel from Darktrace masters.","[{""title"": """", ""description"": ""1. Detailed setup instructions can be found on the Darktrace Customer Portal: https://customerportal.darktrace.com/product-guides/main/microsoft-sentinel-introduction\n 2. Take note of the Workspace ID and the Primary key. You will need to enter these details on your Darktrace System Configuration page.\n "", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Darktrace Configuration"", ""description"": ""1. Perform the following steps on the Darktrace System Configuration page:\n 2. Navigate to the System Configuration Page (Main Menu > Admin > System Config)\n 3. Go into Modules configuration and click on the \""Microsoft Sentinel\"" configuration card\n 4. Select \""HTTPS (JSON)\"" and hit \""New\""\n 5. Fill in the required details and select appropriate filters\n 6. 
Click \""Verify Alert Settings\"" to attempt authentication and send out a test alert\n 7. Run a \""Look for Test Alerts\"" sample query to validate that the test alert has been received"", ""instructions"": """"}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Darktrace Prerequisites"", ""description"": ""To use this Data Connector a Darktrace master running v5.2+ is required.\n Data is sent to the [Azure Monitor HTTP Data Collector API](https://docs.microsoft.com/azure/azure-monitor/logs/data-collector-api) over HTTPs from Darktrace masters, therefore outbound connectivity from the Darktrace master to Microsoft Sentinel REST API is required.""}, {""name"": ""Filter Darktrace Data"", ""description"": ""During configuration it is possible to set up additional filtering on the Darktrace System Configuration page to constrain the amount or types of data sent.""}, {""name"": ""Try the Darktrace Sentinel Solution"", ""description"": ""You can get the most out of this connector by installing the Darktrace Solution for Microsoft Sentinel. 
This will provide workbooks to visualise alert data and analytics rules to automatically create alerts and incidents from Darktrace Model Breaches and AI Analyst incidents.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Darktrace/Data%20Connectors/DarktraceConnectorRESTAPI.json","true" +"ThreatIntelligenceIndicator","Datalake2Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Datalake2Sentinel","cert_orange_cyberdefense","microsoft-sentinel-solution-datalake2sentinel","2024-01-15","2024-01-15","","Orange Cyberdefense","Partner","https://www.orangecyberdefense.com/global/contact","","domains,verticals","Datalake2SentinelConnector","Orange Cyberdefense","Datalake2Sentinel","This solution installs the Datalake2Sentinel connector which is built using the Codeless Connector Platform and allows you to automatically ingest threat intelligence indicators from **Datalake Orange Cyberdefense's CTI platform** into Microsoft Sentinel via the Upload Indicators REST API. After installing the solution, configure and enable this data connector by following guidance in Manage solution view.","[{""title"": ""Installation and setup instructions"", ""description"": ""Use the documentation from this Github repository to install and configure the Datalake to Microsoft Sentinel connector. 
\n\nhttps://github.com/cert-orangecyberdefense/datalake2sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Datalake2Sentinel/Data%20Connectors/Datalake2SentinelConnector.json","true" +"DataminrPulse_Alerts_CL","Dataminr Pulse","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dataminr%20Pulse","dataminrinc1648845584891","dataminr_sentinel","2023-04-12","2023-04-12","","Dataminr Support","Partner","https://www.dataminr.com/dataminr-support#support","","domains","DataminrPulseAlerts","Dataminr","Dataminr Pulse Alerts Data Connector","Dataminr Pulse Alerts Data Connector brings our AI-powered real-time intelligence into Microsoft Sentinel for faster threat detection and response.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the DataminrPulse in which logs are pushed via Dataminr RTAP and it will ingest logs into Microsoft Sentinel. Furthermore, the connector will fetch the ingested data from the custom logs table and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1- Credentials for the Dataminr Pulse Client ID and Client Secret**\n\n * Obtain Dataminr Pulse user ID/password and API client ID/secret from your Dataminr Customer Success Manager (CSM).""}, {""title"": """", ""description"": ""**STEP 2- Configure Watchlists in Dataminr Pulse portal.**\n\n Follow the steps in this section to configure watchlists in portal:\n\n 1. **Login** to the Dataminr Pulse [website](https://app.dataminr.com).\n\n 2. Click on the settings gear icon, and select **Manage Lists**.\n\n 3. Select the type of Watchlist you want to create (Cyber, Topic, Company, etc.) and click the **New List** button.\n\n 4. Provide a **name** for your new Watchlist, and select a highlight color for it, or keep the default color.\n\n 5. When you are done configuring the Watchlist, click **Save** to save it.""}, {""title"": """", ""description"": ""**STEP 3 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of DataminrPulse Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 4 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of DataminrPulse Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of DataminrPulse Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Dataminr Pulse Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DataminrPulse connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-DataminrPulseAlerts-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-DataminrPulseAlerts-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **Function Name** \n\n\t b. **Location**: The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t c. **Workspace**: Enter Workspace ID of log analytics Workspace ID \n\n\t d. **Workspace Key**: Enter Primary Key of log analytics Workspace \n\n\t e. **DataminrBaseURL**: Enter Base URL starting with \""https://\"" followed by hostname (Example: https://gateway.dataminr.com/) \n\n\t f. **ClientId**: Enter your Dataminr account Client ID \n\n\t g. **ClientSecret**: Enter your Dataminr account Client Secret \n\n\t h. 
**AzureEntraObjectID**: Enter Object id of your Microsoft Entra App \n\n\t i. **AlertsTableName**: Enter name of the table used to store Dataminr Alerts logs. Default is 'DataminrPulse_Alerts' \n\n\t j. **AzureClientId**: Enter Azure Client ID that you have created during app registration \n\n\t k. **AzureClientSecret**: Enter Azure Client Secret that you have created during creating the client secret \n\n\t l. **AzureTenantId**: Enter Azure Tenant ID of your Azure Active Directory \n\n\t m. **AzureResourceGroupName**: Enter Azure Resource Group Name in which you want deploy the data connector \n\n\t n. **AzureWorkspaceName**: Enter Microsoft Sentinel Workspace Name of Log Analytics workspace \n\n\t o. **AzureSubscriptionId**: Enter Azure Subscription Id which is present in the subscription tab in Microsoft Sentinel \n\n\t p. **LogLevel**: Add log level or log severity value. Default is 'INFO' \n\n\t q. **Schedule**: Enter a valid Quartz Cron-Expression (Example: 0 0 0 * * *) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Dataminr Pulse Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": ""1) Deploy a Function App"", ""description"": ""> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-DataminrPulseAlerts-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. DmPulseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": ""2) Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **Function Name** \n\n\t b. **Location**: The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t c. **Workspace**: Enter Workspace ID of log analytics Workspace ID \n\n\t d. **Workspace Key**: Enter Primary Key of log analytics Workspace \n\n\t e. 
**DataminrBaseURL**: Enter Base URL starting with \""https://\"" followed by hostname (Example: https://gateway.dataminr.com/) \n\n\t f. **ClientId**: Enter your Dataminr account Client ID \n\n\t g. **ClientSecret**: Enter your Dataminr account Client Secret \n\n\t h. **AzureEntraObjectID**: Enter Object id of your Microsoft Entra App \n\n\t i. **AlertsTableName**: Enter name of the table used to store Dataminr Alerts logs. Default is 'DataminrPulse_Alerts' \n\n\t j. **AzureClientId**: Enter Azure Client ID that you have created during app registration \n\n\t k. **AzureClientSecret**: Enter Azure Client Secret that you have created during creating the client secret \n\n\t l. **AzureTenantId**: Enter Azure Tenant ID of your Azure Active Directory \n\n\t m. **AzureResourceGroupName**: Enter Azure Resource Group Name in which you want deploy the data connector \n\n\t n. **AzureWorkspaceName**: Enter Microsoft Sentinel Workspace Name of Log Analytics workspace \n\n\t o. **AzureSubscriptionId**: Enter Azure Subscription Id which is present in the subscription tab in Microsoft Sentinel \n\n\t p. **LogLevel**: Add log level or log severity value. Default is 'INFO' \n\n\t q. **Schedule**: Enter a valid Quartz Cron-Expression (Example: 0 0 0 * * *) \n\n\t r. **logAnalyticsUri** (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**STEP 7 - Post Deployment steps**\n\n""}, {""title"": ""1) Get the Function app endpoint"", ""description"": ""1. Go to Azure function Overview page and Click on **\""Functions\""** in the left blade.\n2. Click on the function called **\""DataminrPulseAlertsHttpStarter\""**.\n3. 
Go to **\""GetFunctionurl\""** and copy the function url.\n4. Replace **{functionname}** with **\""DataminrPulseAlertsSentinelOrchestrator\""** in copied function url.""}, {""title"": ""2) To add integration settings in Dataminr RTAP using the function URL"", ""description"": ""1. Open any API request tool like Postman.\n2. Click on '+' to create a new request.\n3. Select HTTP request method as **'POST'**.\n4. Enter the url prepapred in **point 1)**, in the request URL part.\n5. In Body, select raw JSON and provide request body as below(case-sensitive): \n\t\t{ \n\t\t \""integration-settings\"": \""ADD\"", \n\t\t \""url\"": \""`(URL part from copied Function-url)`\"", \n\t\t \""token\"": \""`(value of code parameter from copied Function-url)`\"" \n\t\t}\n6. After providing all required details, click **Send**.\n7. You will receive an integration setting ID in the HTTP response with a status code of 200.\n8. Save **Integration ID** for future reference.""}, {""title"": """", ""description"": ""*Now we are done with the adding integration settings for Dataminr RTAP. Once the Dataminr RTAP send an alert data, Function app is triggered and you should be able to see the Alerts data from the Dataminr Pulse into LogAnalytics workspace table called \""DataminrPulse_Alerts_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Required Dataminr Credentials/permissions"", ""description"": ""\n\na. Users must have a valid Dataminr Pulse API **client ID** and **secret** to use this data connector.\n\n b. One or more Dataminr Pulse Watchlists must be configured in the Dataminr Pulse website.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dataminr%20Pulse/Data%20Connectors/DataminrPulseAlerts/DataminrPulseAlerts_FunctionApp.json","true" +"CommonSecurityLog","Delinea Secret Server","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server","delineainc1653506022260","delinea_secret_server_mss","2022-05-06","","","Delinea","Partner","https://delinea.com/support/","","domains","DelineaSecretServerAma","Delinea, Inc","[Deprecated] Delinea Secret Server via AMA","Common Event Format (CEF) from Delinea Secret Server ","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. 
Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server/Data%20Connectors/template_DelineaSecretServerAMA.json","true" +"CommonSecurityLog","Delinea Secret Server","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server","delineainc1653506022260","delinea_secret_server_mss","2022-05-06","","","Delinea","Partner","https://delinea.com/support/","","domains","DelineaSecretServer_CEF","Delinea, Inc","[Deprecated] Delinea Secret Server via Legacy Agent","Common Event Format (CEF) from Delinea Secret Server ","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. 
Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Delinea Secret Server"", ""description"": ""must be configured to export logs via Syslog \n\n [Learn more about configure Secret Server](https://thy.center/ss/link/syslog)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server/Data%20Connectors/DelineaSecretServer_CEF.json","true" +"","Dev 0270 Detection and Hunting","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dev%200270%20Detection%20and%20Hunting","azuresentinel","azure-sentinel-solution-dev0270detectionandhunting","2022-11-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Syslog","Digital Guardian Data Loss Prevention","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Guardian%20Data%20Loss%20Prevention","azuresentinel","azure-sentinel-solution-digitalguardiandlp","2021-07-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","DigitalGuardianDLP","Digital Guardian","[Deprecated] Digital 
Guardian Data Loss Prevention","[Digital Guardian Data Loss Prevention (DLP)](https://digitalguardian.com/platform-overview) data connector provides the capability to ingest Digital Guardian DLP logs into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**DigitalGuardianDLPEvent**](https://aka.ms/sentinel-DigitalGuardianDLP-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Configure Digital Guardian to forward logs via Syslog to remote server where you will install the agent."", ""description"": ""Follow these steps to configure Digital Guardian to forward logs via Syslog:\n\n1.1. Log in to the Digital Guardian Management Console.\n\n1.2. Select **Workspace** > **Data Export** > **Create Export**.\n\n1.3. From the **Data Sources** list, select **Alerts** or **Events** as the data source.\n\n1.4. From the **Export type** list, select **Syslog**.\n\n1.5. From the **Type list**, select **UDP** or **TCP** as the transport protocol.\n\n1.6. In the **Server** field, type the IP address of your Remote Syslog server.\n\n1.7. In the **Port** field, type 514 (or other port if your Syslog server was configured to use non-default port).\n\n1.8. From the **Severity Level** list, select a severity level.\n\n1.9. Select the **Is Active** check box.\n\n1.9. Click **Next**.\n\n1.10. From the list of available fields, add Alert or Event fields for your data export.\n\n1.11. Select a Criteria for the fields in your data export and click **Next**.\n\n1.12. Select a group for the criteria and click **Next**.\n\n1.13. Click **Test Query**.\n\n1.14. Click **Next**.\n\n1.15. Save the data export.""}, {""title"": ""2. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server to which the logs will be forwarded.\n\n> Logs on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. 
Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the Syslog schema.\n\n>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Guardian%20Data%20Loss%20Prevention/Data%20Connectors/Connector_DigitalGuardian_Syslog.json","true" +"DigitalShadows_CL","Digital Shadows","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Shadows","digitalshadows1662022995707","digitalshadows_searchlight_for_sentinel","","","","Digital Shadows","Partner","https://www.digitalshadows.com/","","domains","DigitalShadowsSearchlightAzureFunctions","Digital Shadows","Digital Shadows Searchlight","The Digital Shadows data connector provides ingestion of the incidents and alerts from Digital Shadows Searchlight into the Microsoft Sentinel using the REST API. 
The connector will provide the incidents and alerts information such that it helps to examine, diagnose and analyse the potential security risks and threats.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a 'Digital Shadows Searchlight' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the 'Digital Shadows Searchlight' API**\n\nThe provider should provide or link to detailed steps to configure the 'Digital Shadows Searchlight' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 'Digital Shadows Searchlight' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'Digital Shadows Searchlight' API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure 
Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the 'Digital Shadows Searchlight' connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Digitalshadows-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, 'and/or Other required fields'. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": """", ""description"": ""**Option 2 - Manual Deployment of Azure Functions**\n\n Use the following step-by-step instructions to deploy the 'Digital Shadows Searchlight' connector manually with Azure Functions.""}, {""title"": ""1. Create a Function App"", ""description"": ""1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, ensure Runtime stack is set to **python 3.11**. \n4. In the **Hosting** tab, ensure **Plan type** is set to **'Consumption (Serverless)'**.\n5.select Storage account\n6. 'Add other required configurations'. \n5. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""2. Import Function App Code(Zip deployment)"", ""description"": ""1. Install Azure CLI\n2. From terminal type **az functionapp deployment source config-zip -g -n --src ** and hit enter. 
Set the `ResourceGroup` value to: your resource group name. Set the `FunctionApp` value to: your newly created function app name. Set the `Zip File` value to: `digitalshadowsConnector.zip`(path to your zip file). Note:- Download the zip file from the link - [Function App Code](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Shadows/Data%20Connectors/Digital%20Shadows/digitalshadowsConnector.zip)""}, {""title"": ""3. Configure the Function App"", ""description"": ""1. In the Function App screen, click the Function App name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following 'x (number of)' application settings individually, under Name, with their respective string values (case-sensitive) under Value: \n\t\tDigitalShadowsAccountID\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tDigitalShadowsKey\n\t\tDigitalShadowsSecret\n\t\tHistoricalDays\n\t\tDigitalShadowsURL\n\t\tClassificationFilterOperation\n\t\tHighVariabilityClassifications\n\t\tFUNCTION_NAME\n\t\tlogAnalyticsUri (optional)\n(add any other settings required by the Function App)\nSet the `DigitalShadowsURL` value to: `https://api.searchlight.app/v1`\nSet the `HighVariabilityClassifications` value to: `exposed-credential,marked-document`\nSet the `ClassificationFilterOperation` value to: `exclude` for exclude function app or `include` for include function app \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Digital Shadows account ID, secret and key** is required. 
See the documentation to learn more about API on the `https://portal-digitalshadows.com/learn/searchlight-api/overview/description`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Shadows/Data%20Connectors/Digital%20Shadows/DigitalShadowsSearchlight_API_functionApp.json","true" +"","DomainTools","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DomainTools","domaintoolsllc1647901527537","domaintools-iris-investigate","2022-10-20","","","DomainTools","Partner","https://www.domaintools.com/support/","","domains","","","","","","","","false" +"DoppelTable_CL","Doppel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Doppel","doppel","azure-sentinel-solution-doppel","2024-11-20","","","Doppel","Partner","https://www.doppel.com/request-a-demo","","domains","Doppel_DataConnector","Doppel","Doppel Data Connector","The data connector is built on Microsoft Sentinel for Doppel events and alerts and supports DCR-based [ingestion time transformations](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/ingestion-time-transformations) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""title"": ""Configure Doppel Webhook"", ""description"": ""Configure the Webhook in Doppel and Endpoint with permissions in Microsoft Sentinel to send data."", ""instructions"": [{""type"": ""InstructionStepsGroup"", ""parameters"": {""enable"": true, ""userRequestPlaceHolder"": """", ""instructionSteps"": [{""title"": ""Register the Application in Microsoft Entra ID"", ""description"": ""1. **Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**:\n - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab.\n - Ensure you are logged in with an account that has **Admin level** permissions.\n\n2. 
**Create a New Application**:\n - In the **Microsoft Entra ID portal**, select **App registrations** mentioned on the left-hand side tab.\n - Click on **+ New registration**.\n - Fill out the following fields:\n - **Name**: Enter a name for the app (e.g., \u201cDoppel App\u201d).\n - **Supported account types**: Choose **Accounts in this organizational directory only** (Default Directory only - Single tenant).\n - **Redirect URI**: Leave this blank unless required otherwise.\n - Click **Register** to create the application.\n\n3. **Copy Application and Tenant IDs**:\n - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You\u2019ll need these for the integration.\n\n4. **Create a Client Secret**:\n - In the **Certificates & secrets** section, click **+ New client secret**.\n - Add a description (e.g., 'Doppel Secret') and set an expiration (e.g., 1 year).\n - Click **Add**.\n - **Copy the client secret value immediately**, as it will not be shown again.""}, {""title"": ""Assign the \""Monitoring Metrics Publisher\"" Role to the App"", ""description"": ""1. **Open the Resource Group in Azure Portal**:\n - Navigate to the **Resource Group** that contains the **Log Analytics Workspace** and **Data Collection Rules (DCRs)** where you want the app to push data.\n\n2. **Assign the Role**:\n - In the **Resource Group** menu, click on **Access control (IAM)** mentioned on the left-hand side tab ..\n - Click on **+ Add** and select **Add role assignment**.\n - In the **Role** dropdown, search for and select the **Monitoring Metrics Publisher** role.\n - Under **Assign access to**, choose **Azure AD user, group, or service principal**.\n - In the **Select** field, search for your registered app by **name** or **client ID**.\n - Click **Save** to assign the role to the application.""}, {""title"": ""Deploy the ARM Template"", ""description"": ""1. 
**Retrieve the Workspace ID**:\n - After assigning the role, you will need the **Workspace ID**.\n - Navigate to the **Log Analytics Workspace** within the **Resource Group**.\n - In the **Overview** section, locate the **Workspace ID** field under **Workspace details**.\n - **Copy the Workspace ID** and keep it handy for the next steps.\n\n2. **Click the Deploy to Azure Button**:\n - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fmetron-labs%2FAzure-Sentinel%2Frefs%2Fheads%2FDoppelSolution%2FSolutions%2FDoppel%2FData%2520Connectors%2FDeployToAzure.json).\n - This will take you directly to the Azure portal to start the deployment.\n\n3. **Review and Customize Parameters**:\n - On the custom deployment page, ensure you\u2019re deploying to the correct **subscription** and **resource group**.\n - Fill in the parameters like **workspace name**, **workspace ID**, and **workspace location**.\n\n4. **Click Review + Create** and then **Create** to deploy the resources.""}, {""title"": ""Verify DCE, DCR, and Log Analytics Table Setup"", ""description"": ""1. **Check the Data Collection Endpoint (DCE)**:\n - After deploying, go to **Azure Portal > Data Collection Endpoints**.\n - Verify that the **DoppelDCE** endpoint has been created successfully.\n - **Copy the DCE Logs Ingestion URI**, as you\u2019ll need this for generating the webhook URL.\n\n2. **Confirm Data Collection Rule (DCR) Setup**:\n - Go to **Azure Portal > Data Collection Rules**.\n - Ensure the **DoppelDCR** rule is present.\n - **Copy the Immutable ID** of the DCR from the Overview page, as you\u2019ll need it for the webhook URL.\n\n3. 
**Validate Log Analytics Table**:\n - Navigate to your **Log Analytics Workspace** (linked to Microsoft Sentinel).\n - Under the **Tables** section, verify that the **DoppelTable_CL** table has been created successfully and is ready to receive data.""}, {""title"": ""Integrate Doppel Alerts with Microsoft Sentinel"", ""description"": ""1. **Gather Necessary Information**:\n - Collect the following details required for integration:\n - **Data Collection Endpoint ID (DCE-ID)**\n - **Data Collection Rule ID (DCR-ID)**\n - **Microsoft Entra Credentials**: Tenant ID, Client ID, and Client Secret.\n\n2. **Coordinate with Doppel Support**:\n - Share the collected DCE-ID, DCR-ID, and Microsoft Entra credentials with Doppel support.\n - Request assistance to configure these details in the Doppel tenant to enable webhook setup.\n\n3. **Webhook Setup by Doppel**:\n - Doppel will use the provided Resource IDs and credentials to configure a webhook.\n - This webhook will facilitate the forwarding of alerts from Doppel to Microsoft Sentinel.\n\n4. 
**Verify Alert Delivery in Microsoft Sentinel**:\n - Check that alerts from Doppel are successfully forwarded to Microsoft Sentinel.\n - Validate that the **Workbook** in Microsoft Sentinel is updated with the alert statistics, ensuring seamless data integration.""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": false}}], ""customs"": [{""name"": ""Microsoft Entra Tenant ID, Client ID and Client Secret"", ""description"": ""Microsoft Entra ID requires a Client ID and Client Secret to authenticate your application. 
Additionally, Global Admin/Owner level access is required to assign the Entra-registered application a Resource Group Monitoring Metrics Publisher role.""}, {""name"": ""Requires Workspace ID, DCE-URI, DCR-ID"", ""description"": ""You will need to get the Log Analytics Workspace ID, DCE Logs Ingestion URI and DCR Immutable ID for the configuration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Doppel/Data%20Connectors/Template_Doppel.json","true" +"DragosAlerts_CL","Dragos","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dragos","dragosinc1734451815609","microsoft-sentinel-solution-dragos","2025-01-23","2025-01-23","","Dragos Inc","Partner","https://www.dragos.com","","domains","DragosSitestoreCCP","Dragos"," Dragos Notifications via Cloud Sitestore","The [Dragos Platform](https://www.dragos.com/) is the leading Industrial Cyber Security platform it offers a comprehensive Operational Technology (OT) cyber threat detection built by unrivaled industrial cybersecurity expertise. 
This solution enables Dragos Platform notification data to be viewed in Microsoft Sentinel so that security analysts are able to triage potential cyber security events occurring in their industrial environments.","[{""description"": ""Please provide the following information to allow Microsoft Sentinel to connect to your Dragos Sitestore."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Dragos Sitestore Hostname"", ""placeholder"": ""dragossitestore.example.com"", ""type"": ""text"", ""name"": ""dragosSitestoreHostname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Dragos Sitestore API Key ID"", ""placeholder"": ""Enter the API key ID."", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Dragos Sitestore API Key Secret"", ""placeholder"": ""Enter the API key secret"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Minimum Notification Severity. Valid values are 0-5 inclusive. Ensure less than or equal to maximum severity."", ""placeholder"": ""Enter the min severity (recommend 0 for all notifications)"", ""type"": ""number"", ""name"": ""minSeverity""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Maximum Notification Severity. Valid values are 0-5 inclusive. 
Ensure greater than or equal to minimum severity."", ""placeholder"": ""Enter the max severity (recommend 5 for all notifications)"", ""type"": ""number"", ""name"": ""maxSeverity""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect to Sitestore"", ""disconnectLabel"": ""Disconnect from Sitestore"", ""name"": ""connectionToggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Dragos Sitestore API access"", ""description"": ""A Sitestore user account that has the `notification:read` permission. This account also needs to have an API key that can be provided to Sentinel.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dragos/Data%20Connectors/DragosSiteStore_CCP/dragosSitestoreDataConnectorDefinition.json","true" +"DruvaInsyncEvents_CL","DruvaDataSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud","druva-azuresentinel-solution","azure-sentinel-solution-druva","2024-12-24","","","Druva Inc","Partner","https://support.druva.com/","","domains","DruvaEventCCPDefinition","Microsoft","Druva Events Connector","Provides capability to ingest the Druva events from Druva APIs","[{""description"": "">Note: Configurations to connect to Druva Rest API\n""}, {""description"": ""Step 1: Create credentials from Druva console. Refer this doc for steps:- https://help.druva.com/en/articles/8580838-create-and-manage-api-credentials\n""}, {""description"": ""Step 2: Enter the hostname. 
For public cloud its apis.druva.com\n""}, {""description"": ""Step 3: Enter client id and client secret key\n""}, {""description"": ""Provide required values:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Hostname"", ""placeholder"": ""Example: apis.druva.com"", ""type"": ""text"", ""name"": ""hostname""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Druva API to start collecting logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permission are required"", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Druva API Access"", ""description"": ""Druva API requires a client id and client secret to authenticate""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud/Data%20Connectors/Druva_ccp/Druva_DataConnectorDefinition.json","true" +"DruvaPlatformEvents_CL","DruvaDataSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud","druva-azuresentinel-solution","azure-sentinel-solution-druva","2024-12-24","","","Druva Inc","Partner","https://support.druva.com/","","domains","DruvaEventCCPDefinition","Microsoft","Druva Events Connector","Provides capability to ingest the Druva events from Druva APIs","[{""description"": "">Note: Configurations to connect to Druva Rest API\n""}, {""description"": ""Step 1: Create credentials from Druva console. Refer this doc for steps:- https://help.druva.com/en/articles/8580838-create-and-manage-api-credentials\n""}, {""description"": ""Step 2: Enter the hostname. 
For public cloud its apis.druva.com\n""}, {""description"": ""Step 3: Enter client id and client secret key\n""}, {""description"": ""Provide required values:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Hostname"", ""placeholder"": ""Example: apis.druva.com"", ""type"": ""text"", ""name"": ""hostname""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Druva API to start collecting logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permission are required"", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Druva API Access"", ""description"": ""Druva API requires a client id and client secret to authenticate""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud/Data%20Connectors/Druva_ccp/Druva_DataConnectorDefinition.json","true" +"DruvaSecurityEvents_CL","DruvaDataSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud","druva-azuresentinel-solution","azure-sentinel-solution-druva","2024-12-24","","","Druva Inc","Partner","https://support.druva.com/","","domains","DruvaEventCCPDefinition","Microsoft","Druva Events Connector","Provides capability to ingest the Druva events from Druva APIs","[{""description"": "">Note: Configurations to connect to Druva Rest API\n""}, {""description"": ""Step 1: Create credentials from Druva console. Refer this doc for steps:- https://help.druva.com/en/articles/8580838-create-and-manage-api-credentials\n""}, {""description"": ""Step 2: Enter the hostname. 
For public cloud its apis.druva.com\n""}, {""description"": ""Step 3: Enter client id and client secret key\n""}, {""description"": ""Provide required values:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Hostname"", ""placeholder"": ""Example: apis.druva.com"", ""type"": ""text"", ""name"": ""hostname""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Druva API to start collecting logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permission are required"", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Druva API Access"", ""description"": ""Druva API requires a client id and client secret to authenticate""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud/Data%20Connectors/Druva_ccp/Druva_DataConnectorDefinition.json","true" +"Dynamics365Activity","Dynamics 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynamics%20365","sentinel4dynamics365","dynamics365connector","2023-01-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Dynamics365","Microsoft","Dynamics 365","The Dynamics 365 Common Data Service (CDS) activities connector provides insight into admin, user, and support activities, as well as Microsoft Social Engagement logging events. By connecting Dynamics 365 CRM logs into Microsoft Sentinel, you can view this data in workbooks, use it to create custom alerts, and improve your investigation process. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com//fwlink/p/?linkid=2226719&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""description"": ""Connect [Dynamics 365 CRM](https://aka.ms/Sentinel/Dynamics365) activity logs to your Microsoft Sentinel workspace."", ""instructions"": [{""parameters"": {""connectorKind"": ""Dynamics365"", ""title"": ""Dynamics365"", ""enable"": true}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Tenant Permissions"", ""description"": ""'Security Administrator' or 'Global Administrator' on the workspace's tenant.""}, {""name"": ""License"", ""description"": ""[Microsoft Dynamics 365 production license](https://docs.microsoft.com/office365/servicedescriptions/microsoft-dynamics-365-online-service-description) (This connector is available for production environments only, not for sandbox). 
Also, a Microsoft 365 Enterprise [E3 or E5](https://docs.microsoft.com/power-platform/admin/enable-use-comprehensive-auditing#requirements) subscription is required for Activity Logging.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynamics%20365/Data%20Connectors/template_Dynamics365.json","true" +"DynatraceAttacks_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceAttacks","Dynatrace","Dynatrace Attacks","This connector uses the Dynatrace Attacks REST API to ingest detected attacks into Microsoft Sentinel Log Analytics","[{""title"": ""Dynatrace Attack Events to Microsoft Sentinel"", ""description"": ""Configure and Enable Dynatrace [Application Security](https://www.dynatrace.com/platform/application-security/). \n Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Dynatrace tenant (ex. xyz.dynatrace.com)"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{dynatraceEnvironmentUrl}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Dynatrace tenant (ex. 
xyz.dynatrace.com)"", ""description"": ""You need a valid Dynatrace tenant with [Application Security](https://www.dynatrace.com/platform/application-security/) enabled, learn more about the [Dynatrace platform](https://www.dynatrace.com/).""}, {""name"": ""Dynatrace Access Token"", ""description"": ""You need a Dynatrace Access Token, the token should have ***Read attacks*** (attacks.read) scope.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_Attacks.json","true" +"DynatraceAuditLogs_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceAuditLogs","Dynatrace","Dynatrace Audit Logs","This connector uses the [Dynatrace Audit Logs REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/audit-logs) to ingest tenant audit logs into Microsoft Sentinel Log Analytics","[{""title"": ""Dynatrace Audit Log Events to Microsoft Sentinel"", ""description"": ""Enable Dynatrace Audit [Logging](https://docs.dynatrace.com/docs/shortlink/audit-logs#enable-audit-logging). \n Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Dynatrace tenant (ex. 
xyz.dynatrace.com)"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{dynatraceEnvironmentUrl}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Dynatrace tenant (ex. xyz.dynatrace.com)"", ""description"": ""You need a valid Dynatrace Tenant, to learn more about the Dynatrace platform [Start your free trial](https://www.dynatrace.com/trial).""}, {""name"": ""Dynatrace Access Token"", ""description"": ""You need a Dynatrace Access Token, the token should have ***Read audit logs*** (auditLogs.read) scope.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_AuditLogs.json","true" +"DynatraceProblems_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceProblems","Dynatrace","Dynatrace Problems","This connector uses the [Dynatrace Problem REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/problems-v2) to ingest problem events into Microsoft Sentinel Log Analytics","[{""title"": ""Dynatrace Problem Events to Microsoft Sentinel"", ""description"": ""Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Dynatrace tenant (ex. 
xyz.dynatrace.com)"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{dynatraceEnvironmentUrl}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Dynatrace tenant (ex. xyz.dynatrace.com)"", ""description"": ""You need a valid Dynatrace Tenant, to learn more about the Dynatrace platform [Start your free trial](https://www.dynatrace.com/trial).""}, {""name"": ""Dynatrace Access Token"", ""description"": ""You need a Dynatrace Access Token, the token should have ***Read problems*** (problems.read) scope.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_Problems.json","true" +"DynatraceSecurityProblems_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceRuntimeVulnerabilities","Dynatrace","Dynatrace Runtime Vulnerabilities","This connector uses the [Dynatrace Security Problem REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/application-security/vulnerabilities/get-vulnerabilities) to ingest detected runtime vulnerabilities into Microsoft Sentinel Log Analytics.","[{""title"": ""Dynatrace Vulnerabilities Events to Microsoft Sentinel"", ""description"": ""Configure and Enable Dynatrace [Application Security](https://www.dynatrace.com/platform/application-security/). 
\n Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Dynatrace tenant (ex. xyz.dynatrace.com)"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{dynatraceEnvironmentUrl}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Dynatrace tenant (ex. xyz.dynatrace.com)"", ""description"": ""You need a valid Dynatrace tenant with [Application Security](https://www.dynatrace.com/platform/application-security/) enabled, learn more about the [Dynatrace platform](https://www.dynatrace.com/).""}, {""name"": ""Dynatrace Access Token"", ""description"": ""You need a Dynatrace Access Token, the token should have ***Read security problems*** (securityProblems.read) scope.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_RuntimeVulnerabilities.json","true" +"ESETInspect_CL","ESET Inspect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Inspect","esetresearch1579795941720","eset_enterprise_inspector_mss","2022-06-01","","","ESET Enterprise","Partner","https://www.eset.com/int/business/solutions/endpoint-detection-and-response/","","domains","ESETInspect","ESET Netherlands","ESET Inspect","This connector will ingest detections from [ESET Inspect](https://www.eset.com/int/business/solutions/xdr-extended-detection-and-response/) using the provided [REST API](https://help.eset.com/ei_navigate/latest/en-US/api.html). 
This API is present in ESET Inspect version 1.4 and later.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to ESET Inspect to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Step 1 - Create an API user"", ""description"": ""1. Log into the ESET PROTECT console with an administrator account, select the **More** tab and the **Users** subtab. \n2. Click on the **ADD NEW** button and add a **native user**.\n3. Create a new user for the API account. **Optional:** Select a **Home group** other than **All** to limit what detections are ingested. \n4. Under the **Permission Sets** tab, assign the **Inspect reviewer permission set**.\n4. Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. 
\n5.""}, {""title"": ""Step 2 - Copy Workspace ID and Key"", ""description"": "">**IMPORTANT:** Before deploying the ESET Inspect connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Step 3 - Deploy the Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the ESET Inspect connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESETInspect-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , enter the **Inspect base URL** and the **first ID** to start ingesting detections from.\n - The defailt starting ID is **0**. This means that all detections will be ingested. \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labelled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Access to the ESET PROTECT console"", ""description"": ""Permissions to add users""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Inspect/Data%20Connectors/ESETInspect_API_FunctionApp.json","true" +"IntegrationTableIncidents_CL","ESET Protect Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform","eset","eset-protect-platform-solution","2024-10-29","2025-06-17","","ESET Enterprise Integrations","Partner","https://help.eset.com/eset_connect/en-US/integrations.html","","domains","ESETProtectPlatform","ESET","ESET Protect Platform","The ESET Protect Platform data connector enables users to inject detections data from [ESET Protect Platform](https://www.eset.com/int/business/protect-platform/) using the provided [Integration REST 
API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors). Integration REST API runs as scheduled Azure Function App.","[{""description"": "">**NOTE:** The ESET Protect Platform data connector uses Azure Functions to connect to the ESET Protect Platform via Eset Connect API to pull detections logs into Microsoft Sentinel. This process might result in additional data ingestion costs. See details on the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/).""}, {""description"": "">**NOTE:** The newest version of the ESET PROTECT Platform and Microsoft Sentinel integration pulls not only detections logs but also newly created incidents. If your integration was set up before 20.06.2025, please follow [these steps](https://help.eset.com/eset_connect/en-US/update_ms_sentinel_integration.html) to update it.""}, {""title"": ""Step 1 - Create an API user"", ""description"": ""Use this [instruction](https://help.eset.com/eset_connect/en-US/create_api_user_account.html) to create an ESET Connect API User account with **Login** and **Password**.""}, {""title"": ""Step 2 - Create a registered application"", ""description"": ""Create a Microsoft Entra ID registered application by following the steps in the [Register a new application instruction.](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app)""}, {""title"": ""Step 3 - Deploy the ESET Protect Platform data connector using the Azure Resource Manager (ARM) template"", ""description"": ""\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-EsetProtectionPlatform-azuredeploy)\n\n2. Select the name of the **Log Analytics workspace** associated with your Microsoft Sentinel. Select the same **Resource Group** as the Resource Group of the Log Analytics workspace.\n\n3. 
Type the parameters of the registered application in Microsoft Entra ID: **Azure Client ID**, **Azure Client Secret**, **Azure Tenant ID**, **Object ID**. You can find the **Object ID** on Azure Portal by following this path \n> Microsoft Entra ID -> Manage (on the left-side menu) -> Enterprise applications -> Object ID column (the value next to your registered application name).\n\n4. Provide the ESET Connect API user account **Login** and **Password** obtained in **Step 1**.\n\n5. Select one or more ESET products (ESET PROTECT, ESET Inspect, ESET Cloud Office Security) from which detections are retrieved.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to register an application in Microsoft Entra ID"", ""description"": ""Sufficient permissions to register an application with your Microsoft Entra tenant are required.""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign the Monitoring Metrics Publisher role to the registered application in Microsoft Entra ID is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors/ESETProtectPlatform_API_FunctionApp.json","true" +"IntegrationTable_CL","ESET Protect Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform","eset","eset-protect-platform-solution","2024-10-29","2025-06-17","","ESET Enterprise Integrations","Partner","https://help.eset.com/eset_connect/en-US/integrations.html","","domains","ESETProtectPlatform","ESET","ESET Protect Platform","The ESET Protect Platform data connector enables users to inject detections data from [ESET Protect Platform](https://www.eset.com/int/business/protect-platform/) using the provided [Integration REST API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors). Integration REST API runs as scheduled Azure Function App.","[{""description"": "">**NOTE:** The ESET Protect Platform data connector uses Azure Functions to connect to the ESET Protect Platform via Eset Connect API to pull detections logs into Microsoft Sentinel. This process might result in additional data ingestion costs. See details on the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/).""}, {""description"": "">**NOTE:** The newest version of the ESET PROTECT Platform and Microsoft Sentinel integration pulls not only detections logs but also newly created incidents. 
If your integration was set up before 20.06.2025, please follow [these steps](https://help.eset.com/eset_connect/en-US/update_ms_sentinel_integration.html) to update it.""}, {""title"": ""Step 1 - Create an API user"", ""description"": ""Use this [instruction](https://help.eset.com/eset_connect/en-US/create_api_user_account.html) to create an ESET Connect API User account with **Login** and **Password**.""}, {""title"": ""Step 2 - Create a registered application"", ""description"": ""Create a Microsoft Entra ID registered application by following the steps in the [Register a new application instruction.](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app)""}, {""title"": ""Step 3 - Deploy the ESET Protect Platform data connector using the Azure Resource Manager (ARM) template"", ""description"": ""\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-EsetProtectionPlatform-azuredeploy)\n\n2. Select the name of the **Log Analytics workspace** associated with your Microsoft Sentinel. Select the same **Resource Group** as the Resource Group of the Log Analytics workspace.\n\n3. Type the parameters of the registered application in Microsoft Entra ID: **Azure Client ID**, **Azure Client Secret**, **Azure Tenant ID**, **Object ID**. You can find the **Object ID** on Azure Portal by following this path \n> Microsoft Entra ID -> Manage (on the left-side menu) -> Enterprise applications -> Object ID column (the value next to your registered application name).\n\n4. Provide the ESET Connect API user account **Login** and **Password** obtained in **Step 1**.\n\n5. 
Select one or more ESET products (ESET PROTECT, ESET Inspect, ESET Cloud Office Security) from which detections are retrieved.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to register an application in Microsoft Entra ID"", ""description"": ""Sufficient permissions to register an application with your Microsoft Entra tenant are required.""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign the Monitoring Metrics Publisher role to the registered application in Microsoft Entra ID is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors/ESETProtectPlatform_API_FunctionApp.json","true" +"Syslog","ESETPROTECT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESETPROTECT","cyberdefensegroupbv1625581149103","eset_protect","2021-10-20","","","ESET Netherlands","Partner","https://techcenter.eset.nl/en/","","domains","ESETPROTECT","ESET","[Deprecated] ESET PROTECT","This connector gathers all events generated by ESET software through the central management solution ESET PROTECT (formerly ESET Security Management Center). This includes Anti-Virus detections, Firewall detections but also more advanced EDR detections. For a complete list of events please refer to [the documentation](https://help.eset.com/protect_admin/latest/en-US/events-exported-to-json-format.html).","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ESETPROTECT and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESETPROTECT/Parsers/ESETPROTECT.txt).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities. The default ESET PROTECT facility is **user**.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure ESET PROTECT"", ""description"": ""Configure ESET PROTECT to send all events through Syslog.\n\n1. Follow [these instructions](https://help.eset.com/protect_admin/latest/en-US/admin_server_settings_syslog.html) to configure syslog output. Make sure to select **BSD** as the format and **TCP** as the transport.\n\n2. Follow [these instructions](https://help.eset.com/protect_admin/latest/en-US/admin_server_settings_export_to_syslog.html) to export all logs to syslog. 
Select **JSON** as the output format.\n\nNote:- Refer to the [documentation](https://learn.microsoft.com/en-us/azure/sentinel/connect-log-forwarder?tabs=rsyslog#security-considerations) for setting up the log forwarder for both local and cloud storage."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESETPROTECT/Data%20Connectors/Connector_Syslog_ESETPROTECT.json","true" +"","EatonForeseer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/EatonForeseer","azuresentinel","azure-sentinel-solution-eatonforeseer","2022-06-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","EclecticIQ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/EclecticIQ","azuresentinel","azure-sentinel-solution-eclecticiqtip","2022-09-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"EgressDefend_CL","Egress Defend","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Defend","egress1589289169584","azure-sentinel-solution-egress-defend","2023-07-27","","","egress1589289169584","Partner","https://support.egress.com/s/","","domains","EgressDefendPolling","Egress Software Technologies","Egress Defend","The Egress Defend audit connector provides the capability to ingest Egress Defend Data into Microsoft Sentinel.","[{""title"": ""Connect Egress Defend with Microsoft Sentinel"", ""description"": ""Enter your Egress Defend API URl, Egress Domain and API token."", ""instructions"": [{""parameters"": {""enable"": 
""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""API URL"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{apiUrl}}""}, {""displayText"": ""Domain name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{domain}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions on the Log Analytics workspace are required to enable the data connector."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Egress API Token"", ""description"": ""An Egress API token is required to ingest audit records to Microsoft Sentinel.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Defend/Data%20Connectors/DefendAPIConnector.json","true" +"DefendAuditData","Egress Iris","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Iris","egress1589289169584","egress-sentinel","2024-03-11","","","Egress Software Technologies Ltd","Partner","https://support.egress.com","","domains","EgressSiemPolling","Egress Software Technologies","Egress Iris Connector","The Egress Iris connector will allow you to ingest Egress data into Sentinel.","[{""title"": ""Connect Egress Data with Microsoft Sentinel"", ""description"": ""Enter your Egress API Hostname and secret."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Hostname"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{hostname}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions on the Log Analytics workspace are required to enable the data connector."", ""providerDisplayName"": ""Workspace"", 
""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Egress API Token"", ""description"": ""An Egress API token is required to ingest audit records to Microsoft Sentinel.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Iris/Data%20Connectors/EgressDataConnector.json","true" +"EgressEvents_CL","Egress Iris","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Iris","egress1589289169584","egress-sentinel","2024-03-11","","","Egress Software Technologies Ltd","Partner","https://support.egress.com","","domains","EgressSiemPolling","Egress Software Technologies","Egress Iris Connector","The Egress Iris connector will allow you to ingest Egress data into Sentinel.","[{""title"": ""Connect Egress Data with Microsoft Sentinel"", ""description"": ""Enter your Egress API Hostname and secret."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Hostname"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{hostname}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions on the Log Analytics workspace are required to enable the data connector."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Egress API Token"", ""description"": ""An Egress API token is required to ingest audit records to Microsoft Sentinel.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Iris/Data%20Connectors/EgressDataConnector.json","true" +"","Elastic 
Search","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Elastic%20Search","azuresentinel","azure-sentinel-solution-elasticsearch","2022-09-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"ElasticAgentLogs_CL","ElasticAgent","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ElasticAgent","azuresentinel","azure-sentinel-solution-elasticagent","2021-11-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ElasticAgent","Elastic","Elastic Agent","The [Elastic Agent](https://www.elastic.co/security) data connector provides the capability to ingest Elastic Agent logs, metrics, and security data into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ElasticAgentEvent**](https://aka.ms/sentinel-ElasticAgent-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using **Elastic Agent 7.14**."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the Elastic Agent logs are forwarded.\n\n> Logs from Elastic Agents deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure Elastic Agent (Standalone)"", ""description"": ""[Follow the instructions](https://www.elastic.co/guide/en/fleet/current/elastic-agent-configuration.html) to configure Elastic Agent to output to Logstash""}, {""title"": ""3. 
Configure Logstash to use Microsoft Logstash Output Plugin"", ""description"": ""Follow the steps to configure Logstash to use microsoft-logstash-output-azure-loganalytics plugin:\n\n3.1) Check if the plugin is already installed:\n> ./logstash-plugin list | grep 'azure-loganalytics'\n**(if the plugin is installed go to step 3.3)**\n\n3.2) Install plugin:\n> ./logstash-plugin install microsoft-logstash-output-azure-loganalytics\n\n3.3) [Configure Logstash](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/microsoft-logstash-output-azure-loganalytics) to use the plugin""}, {""title"": ""4. Validate log ingestion"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using custom table specified in step 3.3 (e.g. ElasticAgentLogs_CL).\n\n>It may take about 30 minutes until the connection streams data to your workspace.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Include custom pre-requisites if the connectivity requires - else delete customs"", ""description"": ""Description for any custom pre-requisite""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ElasticAgent/Data%20Connectors/Connector_ElasticAgent.json","true" +"","Endace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Endace","azuresentinel","azure-sentinel-solution-endace","2025-03-24","","","Endace","Partner","https://endace.com","","domains","","","","","","","","false" +"","Endpoint Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Endpoint%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-endpointthreat","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Entrust identity as Service","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Entrust%20identity%20as%20Service","azuresentinel","azure-sentinel-solution-entrustidentity","2023-05-22","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"ErmesBrowserSecurityEvents_CL","Ermes Browser Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ermes%20Browser%20Security","ermes","azure-sentinel-solution-ermes-browser-security","2023-09-29","","","Ermes Cyber Security S.p.A.","Partner","https://www.ermes.company","","domains","ErmesBrowserSecurityEvents","Ermes Cyber Security S.p.A.","Ermes Browser Security Events","Ermes Browser Security Events","[{""description"": ""Connect using OAuth2 credentials"", ""instructions"": [{""type"": ""OAuthForm"", 
""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect Ermes Browser Security Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Ermes Client Id and Client Secret"", ""description"": ""Enable API access in Ermes. Please contact [Ermes Cyber Security](https://www.ermes.company) support for more information.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ermes%20Browser%20Security/Data%20Connectors/ErmesBrowserSecurityEvents_ccp/data_connector_definition.json","true" +"eset_CL","Eset Security Management Center","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Eset%20Security%20Management%20Center","esetresearch1579795941720","Eset_Security_Management_Center_MSS","2022-05-11","","","Eset","partner","https://support.eset.com/en","","domains","EsetSMC","Eset","Eset Security Management Center","Connector for [Eset SMC](https://help.eset.com/esmc_admin/72/en-US/) threat events, audit logs, firewall events and web sites filter.","[{""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure rsyslog to accept logs from your Eset SMC IP address.\n\n```\nsudo -i\r\n\r\n# Set ESET SMC source IP address\r\nexport ESETIP={Enter your IP address}\r\n\r\n# Create rsyslog configuration file\r\ncat > /etc/rsyslog.d/80-remote.conf << EOF\r\n\\$ModLoad imudp\r\n\\$UDPServerRun 514\r\n\\$ModLoad imtcp\r\n\\$InputTCPServerRun 514\r\n\\$AllowedSender TCP, 127.0.0.1, $ESETIP\r\n\\$AllowedSender UDP, 127.0.0.1, $ESETIP\r\nuser.=alert;user.=crit;user.=debug;user.=emerg;user.=err;user.=info;user.=notice;user.=warning @127.0.0.1:25224\r\nEOF\r\n\r\n# Restart rsyslog\r\nsystemctl restart rsyslog```""}, {""title"": ""3. Configure OMS agent to pass Eset SMC data in API format"", ""description"": ""In order to easily recognize Eset data we will push it to separate table and parse at agent so query in Azure Sentinel is easier and fast. To make it simple we will just modify ```match oms.**``` section to send data as API objects by changing type to out_oms_api. 
Modify file on /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/conf/omsagent.conf. Full ```match oms.**``` section looks like this:\r\n\r\n```\r\n\r\n type out_oms_api\r\n log_level info\r\n num_threads 5\r\n run_in_background false\r\n\r\n omsadmin_conf_path /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/conf/omsadmin.conf\r\n cert_path /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/certs/oms.crt\r\n key_path /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/certs/oms.key\r\n\r\n buffer_chunk_limit 15m\r\n buffer_type file\r\n buffer_path /var/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/state/out_oms_common*.buffer\r\n\r\n buffer_queue_limit 10\r\n buffer_queue_full_action drop_oldest_chunk\r\n flush_interval 20s\r\n retry_limit 10\r\n retry_wait 30s\r\n max_retry_wait 9m\r\n\r\n```\r\n""}, {""title"": ""4. Change OMS agent configuration to catch tag oms.api.eset and parse structured data"", ""description"": ""Modify file /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/conf/omsagent.d/syslog.conf\n```\r\n\r\n type syslog\r\n port 25224\r\n bind 127.0.0.1\r\n protocol_type udp\r\n tag oms.api.eset\r\n\r\n\r\n\r\n @type parser\r\n key_name message\r\n format /(?.*?{.*})/\r\n\r\n\r\n\r\n @type parser\r\n key_name message\r\n format json\r\n\r\n```""}, {""title"": ""5. Disable automatic configuration and restart agent"", ""description"": ""```bash\r\n# Disable changes to configuration files from Portal\r\nsudo su omsagent -c 'python /opt/microsoft/omsconfig/Scripts/OMS_MetaConfigHelper.py --disable'\r\n\r\n# Restart agent\r\nsudo /opt/microsoft/omsagent/bin/service_control restart\r\n\r\n# Check agent logs\r\ntail -f /var/opt/microsoft/omsagent/log/omsagent.log\r\n```""}, {""title"": ""6. 
Configure Eset SMC to send logs to connector"", ""description"": ""Configure Eset Logs using BSD style and JSON format.\r\n- Go to Syslog server configuration as described in [Eset documentation](https://help.eset.com/esmc_admin/72/en-US/admin_server_settings.html?admin_server_settings_syslog.html) and configure Host (your connector), Format BSD, Transport TCP\r\n- Go to Logging section as described in [Eset documentation](https://help.eset.com/esmc_admin/72/en-US/admin_server_settings.html?admin_server_settings_export_to_syslog.html) and enable JSON""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Access to Eset SMC console"", ""description"": ""Permissions to configure log export""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Eset%20Security%20Management%20Center/Data%20Connectors/esetSmc.json","true" +"Syslog","Exabeam Advanced Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Exabeam%20Advanced%20Analytics","azuresentinel","azure-sentinel-solution-exabeamadvancedanalytics","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Exabeam","Exabeam","[Deprecated] Exabeam Advanced Analytics","The [Exabeam Advanced Analytics](https://www.exabeam.com/ueba/advanced-analytics-and-mitre-detect-and-stop-threats/) data connector provides the capability to ingest Exabeam Advanced Analytics events into Microsoft Sentinel. Refer to [Exabeam Advanced Analytics documentation](https://docs.exabeam.com/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Exabeam Advanced Analytics and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Exabeam%20Advanced%20Analytics/Parsers/ExabeamEvent.txt), on the second line of the query, enter the hostname(s) of your Exabeam Advanced Analytics device(s) and any other unique identifiers for the logstream. 
The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Exabeam Advanced Analytics i54 (Syslog)"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the server where the Exabeam Advanced Analytic logs are generated or forwarded.\n\n> Logs from Exabeam Advanced Analytic deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": 
""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure Exabeam event forwarding to Syslog"", ""description"": ""[Follow these instructions](https://docs.exabeam.com/en/advanced-analytics/i56/advanced-analytics-administration-guide/125351-advanced-analytics.html#UUID-7ce5ff9d-56aa-93f0-65de-c5255b682a08) to send Exabeam Advanced Analytics activity log data via syslog.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Exabeam%20Advanced%20Analytics/Data%20Connectors/Connector_Exabeam_Syslog.json","true" +"ExtraHop_Detections_CL","ExtraHop","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop","extrahop","extrahop-revealx-sentinel","2025-02-11","2025-06-04","","ExtraHop Support","Partner","https://www.extrahop.com/customer-support","","domains","ExtraHop","ExtraHop","ExtraHop Detections Data Connector","The [ExtraHop](https://extrahop.com/) Detections Data Connector enables you to import detection data from ExtraHop RevealX to Microsoft Sentinel through webhook payloads.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ExtraHop in which logs are pushed via ExtraHop webhook and it will ingest logs into Microsoft Sentinel. Furthermore, the connector will fetch the ingested data from the custom logs table and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias **ExtraHopDetections** and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop/Parsers/ExtraHopDetections.yaml). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the ExtraHop Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Complete the following steps for automated deployment of the ExtraHop Detections Data Connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ExtraHop-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the values for the following parameters:\n\n\t a. **Function Name** - Enter the Function Name you want. \n\n\t b. **Workspace ID** - Enter the Workspace ID of the log analytics Workspace. \n\n\t c. **Workspace Key** - Enter the Workspace Key of the log analytics Workspace. \n\n\t d. **Detections Table Name** - Enter the name of the table used to store ExtraHop detection data. \n\n\t e. **LogLevel** - Select Debug, Info, Error, or Warning for the log level or log severity value. \n\n\t f. **AppInsightsWorkspaceResourceID** - Enter the value of the 'Log Analytic Workspace-->Properties-->Resource ID' property. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Complete the following steps to manually deploy the ExtraHop Detections Data Connector with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": ""1) Deploy a Function App"", ""description"": ""> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ExtraHop-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. 
**Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ExtraHopXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": ""2) Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with the following respective values (case-sensitive): \n\n\t a. **Function Name** - Enter the Function Name you want. \n\n\t b. **Workspace ID** - Enter the Workspace ID of the log analytics Workspace. \n\n\tc. **Workspace Key** - Enter the Workspace Key of the log analytics Workspace. \n\n\td. **Detections Table Name** - Enter the name of the table used to store ExtraHop detection data. \n\n\te. **LogLevel** - Select Debug, Info, Error, or Warning for the log level or log severity value. \n\n\t f. **logAnalyticsUri (optional)** - Configure this option to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**STEP 2 - Post Deployment**\n\n""}, {""title"": ""1) Get the Function App endpoint"", ""description"": ""1. Go to the Azure function overview page and click the **\""Functions\""** tab.\n2. Click on the function called **\""ExtraHopHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url available under **\""default (Function key)\""**.\n4. Replace **{functionname}** with **\""ExtraHopDetectionsOrchestrator\""** in copied function url.""}, {""title"": ""2) Configure a connection to Microsoft Sentinel and specify webhook payload criteria from RevealX"", ""description"": ""From your ExtraHop system, configure the Microsoft Sentinel integration to establish a connection between Microsoft Sentinel and ExtraHop RevealX and to create detection notification rules that will send webhook data to Microsoft Sentinel. For detailed instructions, refer to [Integrate ExtraHop RevealX with Microsoft Sentinel SIEM](https://docs.extrahop.com/current/integrations-microsoft-sentinel-siem/).""}, {""title"": """", ""description"": ""*After notification rules have been configured and Microsoft Sentinel is receiving webhook data, the Function App is triggered and you can view ExtraHop detections from the Log Analytics workspace table named \""ExtraHop_Detections_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ExtraHop RevealX permissions"", ""description"": ""The following is required on your ExtraHop RevealX system:\n 1.Your RevealX system must be running firmware version 9.9.2 or later.\n 2.Your RevealX system must be connected to ExtraHop Cloud Services.\n 3.Your user account must have System Administratin privileges on RevealX 360 or Full Write privileges on RevealX Enterprise.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop/Data%20Connectors/ExtraHopDataConnector/ExtraHop_FunctionApp.json","true" +"CommonSecurityLog","ExtraHop Reveal(x)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29","extrahop","extrahop_revealx_mss","2022-05-19","","","ExtraHop","Partner","https://www.extrahop.com/support/","","domains","ExtraHopNetworks","ExtraHop Networks","[Deprecated] ExtraHop Reveal(x) via Legacy Agent","The ExtraHop Reveal(x) data connector enables you to easily connect your Reveal(x) system with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This integration gives you the ability to gain insight into your organization's network and improve your security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward ExtraHop Networks logs to Syslog agent"", ""description"": ""1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine IP address.\n2. Follow the directions to install the [ExtraHop Detection SIEM Connector bundle](https://aka.ms/asi-syslog-extrahop-forwarding) on your Reveal(x) system. The SIEM Connector is required for this integration.\n3. Enable the trigger for **ExtraHop Detection SIEM Connector - CEF**\n4. 
Update the trigger with the ODS syslog targets you created\u00a0\n5. The Reveal(x) system formats syslog messages in Common Event Format (CEF) and then sends data to Microsoft Sentinel.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""ExtraHop"", ""description"": ""ExtraHop Discover or Command appliance with firmware version 7.8 or later with a user account that has Unlimited (administrator) privileges.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29/Data%20Connectors/template_ExtraHopNetworks.json","true" +"CommonSecurityLog","ExtraHop Reveal(x)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29","extrahop","extrahop_revealx_mss","2022-05-19","","","ExtraHop","Partner","https://www.extrahop.com/support/","","domains","ExtraHopNetworksAma","ExtraHop Networks","[Deprecated] ExtraHop Reveal(x) via AMA","The ExtraHop Reveal(x) data connector enables you to easily connect your Reveal(x) system with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This integration gives you the ability to gain insight into your organization's network and improve your security operation capabilities.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward ExtraHop Networks logs to Syslog agent"", ""description"": ""1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine IP address.\n2. Follow the directions to install the [ExtraHop Detection SIEM Connector bundle](https://aka.ms/asi-syslog-extrahop-forwarding) on your Reveal(x) system. The SIEM Connector is required for this integration.\n3. Enable the trigger for **ExtraHop Detection SIEM Connector - CEF**\n4. Update the trigger with the ODS syslog targets you created\u00a0\n5. The Reveal(x) system formats syslog messages in Common Event Format (CEF) and then sends data to Microsoft Sentinel.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29/Data%20Connectors/template_ExtraHopReveal%28x%29AMA.json","true" +"F5Telemetry_ASM_CL","F5 BIG-IP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP","f5-networks","f5_bigip_mss","2022-05-25","","","F5 Networks","Partner","https://support.f5.com/csp/home","","domains","F5BigIp","F5 Networks","F5 BIG-IP","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure and connect F5 BIGIP"", ""description"": ""To connect your F5 BIGIP, you have to post a JSON declaration to the system\u2019s API endpoint. 
For instructions on how to do this, see [Integrating the F5 BGIP with Microsoft Sentinel](https://aka.ms/F5BigIp-Integrate)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP/Data%20Connectors/F5BigIp.json","true" +"F5Telemetry_LTM_CL","F5 BIG-IP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP","f5-networks","f5_bigip_mss","2022-05-25","","","F5 Networks","Partner","https://support.f5.com/csp/home","","domains","F5BigIp","F5 Networks","F5 BIG-IP","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure and connect F5 BIGIP"", ""description"": ""To connect your F5 BIGIP, you have to post a JSON declaration to the system\u2019s API endpoint. 
For instructions on how to do this, see [Integrating the F5 BGIP with Microsoft Sentinel](https://aka.ms/F5BigIp-Integrate)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP/Data%20Connectors/F5BigIp.json","true" +"F5Telemetry_system_CL","F5 BIG-IP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP","f5-networks","f5_bigip_mss","2022-05-25","","","F5 Networks","Partner","https://support.f5.com/csp/home","","domains","F5BigIp","F5 Networks","F5 BIG-IP","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure and connect F5 BIGIP"", ""description"": ""To connect your F5 BIGIP, you have to post a JSON declaration to the system\u2019s API endpoint. 
For instructions on how to do this, see [Integrating the F5 BGIP with Microsoft Sentinel](https://aka.ms/F5BigIp-Integrate)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP/Data%20Connectors/F5BigIp.json","true" +"CommonSecurityLog","F5 Networks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks","f5-networks","f5_networks_data_mss","2022-05-12","","","F5","Partner","https://www.f5.com/services/support","","domains","F5","F5 Networks","[Deprecated] F5 Networks via Legacy Agent","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure F5 to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nGo to [F5 Configuring Application Security Event Logging](https://aka.ms/asi-syslog-f5-forwarding), follow the instructions to set up remote logging, using the following guidelines:\n\n1. Set the **Remote storage type** to CEF.\n2. Set the **Protocol setting** to UDP.\n3. Set the **IP address** to the Syslog server IP address.\n4. 
Set the **port number** to 514, or the port your agent uses.\n5. Set the **facility** to the one that you configured in the Syslog agent (by default, the agent sets this to local4).\n6. You can set the **Maximum Query String Size** to be the same as you configured.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks/Data%20Connectors/template_F5.json","true" +"CommonSecurityLog","F5 Networks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks","f5-networks","f5_networks_data_mss","2022-05-12","","","F5","Partner","https://www.f5.com/services/support","","domains","F5Ama","F5 Networks","[Deprecated] F5 Networks via AMA","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. 
Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure F5 to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nGo to [F5 Configuring Application Security Event Logging](https://aka.ms/asi-syslog-f5-forwarding), follow the instructions to set up remote logging, using the following guidelines:\n\n1. Set the **Remote storage type** to CEF.\n2. Set the **Protocol setting** to UDP.\n3. Set the **IP address** to the Syslog server IP address.\n4. Set the **port number** to 514, or the port your agent uses.\n5. Set the **facility** to the one that you configured in the Syslog agent (by default, the agent sets this to local4).\n6. You can set the **Maximum Query String Size** to be the same as you configured.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks/Data%20Connectors/template_F5NetworksAMA.json","true" +"","FalconFriday","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FalconFriday","falconforcebv1623147592118","falconfriday_content","2021-10-18","","","FalconForce","Partner","https://www.falconforce.nl/en/","","domains","","","","","","","","false" +"","Farsight DNSDB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Farsight%20DNSDB","","","","","","","","","","","","","","","","","","false" +"feedly_indicators_CL","Feedly","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Feedly","feedlyinc1693853810319","azure-sentinel-solution-feedly","2023-08-01","","","Feedly Inc","Partner","https://feedly.com/i/support/contactUs","","domains","Feedly","Feedly","Feedly","This connector allows you to ingest IoCs from Feedly.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions and the Logs Ingestion API to pull IoCs from Feedly into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": ""Step 1 - Prepare Your Environment"", ""description"": ""The Feedly connector will automatically create:\n\n- **Custom Table**: `feedly_indicators_CL` with the required schema\n- **Data Collection Endpoint (DCE)**: For ingesting data\n- **Data Collection Rule (DCR)**: For processing and routing data\n\nNo manual resource creation is required - everything will be created during deployment!\n\nFor detailed instructions, see: [Migrate from HTTP Data Collector API to Logs Ingestion API](https://learn.microsoft.com/azure/azure-monitor/logs/custom-logs-migrate)""}, {""title"": ""Step 2 - Deploy the Connector"", ""description"": ""The ARM template will automatically:\n\n1. Create a managed identity for the Azure Function\n2. Assign the **Monitoring Metrics Publisher** role to the Function App on the DCR\n3. Configure all necessary permissions for data ingestion\n\nNo manual role assignments are required - everything is handled automatically during deployment!""}, {""title"": ""Step 3 - Get your Feedly API token"", ""description"": ""Go to https://feedly.com/i/team/api and generate a new API token for the connector.""}, {""title"": ""(Optional Step) Securely store credentials in Azure Key Vault"", ""description"": ""Azure Key Vault provides a secure mechanism to store and retrieve secrets. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App by using the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema.""}, {""title"": ""Step 4 - Deploy the connector"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function\n\n>**IMPORTANT:** Before deploying, gather the following information:\n- Feedly API Token and Stream IDs\n\nAll Azure Monitor resources (DCE, DCR, custom table, and role assignments) will be created automatically during deployment."", ""instructions"": [], ""innerSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Feedly connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Feedly-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the required parameters:\n - **TableName**: Name for the custom table (default: `feedly_indicators_CL`)\n - **FeedlyApiKey**: Your Feedly API token from Step 3\n - **FeedlyStreamIds**: Comma-separated list of Feedly stream IDs\n - **DaysToBackfill**: Number of days to backfill (default: 7)\n\n>**Note**: If using Azure Key Vault secrets, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Feedly connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""innerSteps"": [{""title"": ""1. Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/raw/refs/heads/master/Solutions/Feedly/Data%20Connectors/FeedlyAzureFunction.zip) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity Bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. FeedlyXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": ""2. Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive):\n\t\t- `DataCollectionEndpoint`: Will be populated automatically after DCE creation\n\t\t- `DcrImmutableId`: Will be populated automatically after DCR creation\n\t\t- `DcrStreamName`: `feedly_indicators_CL`\n\t\t- `FeedlyApiKey`: Your Feedly API token\n\t\t- `FeedlyStreamIds`: Comma-separated Feedly stream IDs\n\t\t- `DaysToBackfill`: Number of days to backfill (e.g., 7)\n\n**Note**: The Function App uses managed identity for authentication to Azure Monitor, so no Azure AD credentials are needed.\n\n>**Note**: Use Azure Key Vault references for sensitive values: `@Microsoft.KeyVault(SecretUri={Security Identifier})`\n\n4. Once all application settings have been entered, click **Save**.""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Azure AD Application Registration"", ""description"": ""An Azure AD App Registration with client credentials and permissions to write to the Data Collection Rule. The application must be granted 'Monitoring Metrics Publisher' role on the DCR.""}, {""name"": ""Data Collection Endpoint and Rule"", ""description"": ""A Data Collection Endpoint (DCE) and Data Collection Rule (DCR) must be created before deploying this connector. [See the documentation to learn more](https://learn.microsoft.com/azure/azure-monitor/logs/custom-logs-migrate).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Feedly/Data%20Connectors/Feedly_API_AzureFunctionApp.json","true" +"CommonSecurityLog","FireEye Network Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security","azuresentinel","azure-sentinel-solution-fireeyenx","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","FireEyeNX","FireEye","[Deprecated] FireEye Network Security (NX) via Legacy Agent","The [FireEye Network Security (NX)](https://www.fireeye.com/products/network-security.html) data connector provides the capability to ingest FireEye Network Security logs into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected 
[**FireEyeNXEvent**](https://aka.ms/sentinel-FireEyeNX-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using FEOS release v9.0"", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure FireEye NX to send logs using CEF"", ""description"": ""Complete the following steps to send data using CEF:\n\n2.1. Log into the FireEye appliance with an administrator account\n\n2.2. Click **Settings**\n\n2.3. 
Click **Notifications**\n\nClick **rsyslog**\n\n2.4. Check the **Event type** check box\n\n2.5. Make sure Rsyslog settings are:\n\n- Default format: CEF\n\n- Default delivery: Per event\n\n- Default send as: Alert""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security/Data%20Connectors/Connector_FireEyeNX_CEF.json","true" +"CommonSecurityLog","FireEye Network Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security","azuresentinel","azure-sentinel-solution-fireeyenx","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","FireEyeNXAma","FireEye","[Deprecated] FireEye Network Security (NX) via AMA","The [FireEye Network Security (NX)](https://www.fireeye.com/products/network-security.html) data connector provides the capability to ingest FireEye Network Security logs into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**FireEyeNXEvent**](https://aka.ms/sentinel-FireEyeNX-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Configure FireEye NX to send logs using CEF"", ""description"": ""Complete the following steps to send data using CEF:\n\n2.1. Log into the FireEye appliance with an administrator account\n\n2.2. Click **Settings**\n\n2.3. Click **Notifications**\n\nClick **rsyslog**\n\n2.4. Check the **Event type** check box\n\n2.5. Make sure Rsyslog settings are:\n\n- Default format: CEF\n\n- Default delivery: Per event\n\n- Default send as: Alert"", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security/Data%20Connectors/template_FireEyeNX_CEFAMA.json","true" +"Firework_CL","Flare","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Flare","flaresystmesinc1617114736428","flare-systems-firework-sentinel","2021-10-20","","","Flare","Partner","https://flare.io/company/contact/","","domains","Flare","Flare","Flare","[Flare](https://flare.systems/platform/) connector allows you to receive data and intelligence from Flare on Microsoft Sentinel.","[{""title"": ""1. Creating an Alert Channel for Microsoft Sentinel"", ""description"": """", ""innerSteps"": [{""description"": ""As an organization administrator, authenticate on [Flare](https://app.flare.systems) and access the [team page](https://app.flare.systems#/team) to create a new alert channel.""}, {""description"": ""Click on 'Create a new alert channel' and select 'Microsoft Sentinel'. Enter your Shared Key And WorkspaceID. Save the Alert Channel. \n For more help and details, see our [Azure configuration documentation](https://docs.microsoft.com/azure/sentinel/connect-data-sources)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID"", ""value"": ""{0}""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary key"", ""value"": ""{0} ""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. 
Associating your alert channel to an alert feed"", ""innerSteps"": [{""description"": ""At this point, you may configure alerts to be sent to Microsoft Sentinel the same way that you would configure regular email alerts.""}, {""description"": ""For a more detailed guide, refer to the Flare documentation.""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Required Flare permissions"", ""description"": ""only Flare organization administrators may configure the Microsoft Sentinel integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Flare/Data%20Connectors/Connector_REST_API_FlareSystemsFirework.json","true" +"CommonSecurityLog","Forcepoint CASB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-casb","2022-05-19","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCasb","Forcepoint CASB","[Deprecated] Forcepoint CASB via Legacy Agent","The Forcepoint CASB (Cloud Access Security Broker) Connector allows you to automatically export CASB logs and events into Microsoft Sentinel in real-time. 
This enriches visibility into user activities across locations and cloud applications, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel. This machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version \n \n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}, {""title"": ""5. Forcepoint integration installation guide "", ""description"": ""To complete the installation of this Forcepoint product integration, follow the guide linked below.\n\n[Installation Guide >](https://frcpnt.com/casb-sentinel)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB/Data%20Connectors/Forcepoint%20CASB.json","true" +"CommonSecurityLog","Forcepoint CASB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-casb","2022-05-19","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCasbAma","Forcepoint CASB","[Deprecated] Forcepoint CASB via AMA","The Forcepoint CASB (Cloud Access Security Broker) Connector allows you to automatically export CASB logs and events into Microsoft Sentinel in real-time. This enriches visibility into user activities across locations and cloud applications, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine.""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}, {""title"": ""3. 
Forcepoint integration installation guide "", ""description"": ""To complete the installation of this Forcepoint product integration, follow the guide linked below.\n\n[Installation Guide >](https://frcpnt.com/casb-sentinel)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB/Data%20Connectors/template_Forcepoint%20CASBAMA.json","true" +"CommonSecurityLog","Forcepoint CSG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-csg","2022-05-10","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCSG","Forcepoint","[Deprecated] Forcepoint CSG via Legacy Agent","Forcepoint Cloud Security Gateway is a converged cloud security service that provides visibility, control, and threat protection for users and data, wherever they are. For more information visit: https://www.forcepoint.com/product/cloud-security-gateway","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""This integration requires the Linux Syslog agent to collect your Forcepoint Cloud Security Gateway Web/Email logs on port 514 TCP as Common Event Format (CEF) and forward them to Microsoft Sentinel."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Your Data Connector Syslog Agent Installation Command is:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""2. 
Implementation options"", ""description"": ""The integration is made available with two implementations options."", ""innerSteps"": [{""title"": ""2.1 Docker Implementation"", ""description"": ""Leverages docker images where the integration component is already installed with all necessary dependencies.\n\nFollow the instructions provided in the Integration Guide linked below.\n\n[Integration Guide >](https://frcpnt.com/csg-sentinel)""}, {""title"": ""2.2 Traditional Implementation"", ""description"": ""Requires the manual deployment of the integration component inside a clean Linux machine.\n\nFollow the instructions provided in the Integration Guide linked below.\n\n[Integration Guide >](https://frcpnt.com/csg-sentinel)""}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version \n \n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF).""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG/Data%20Connectors/ForcepointCloudSecurityGateway.json","true" +"CommonSecurityLog","Forcepoint CSG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-csg","2022-05-10","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCSGAma","Forcepoint","[Deprecated] Forcepoint CSG via AMA","Forcepoint Cloud Security Gateway is a converged cloud security service that provides visibility, control, and threat protection for users and data, wherever they are. For more information visit: https://www.forcepoint.com/product/cloud-security-gateway","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. 
Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine.""}, {""title"": ""Step B. Implementation options"", ""description"": ""The integration is made available with two implementations options."", ""innerSteps"": [{""title"": ""1. Docker Implementation"", ""description"": ""Leverages docker images where the integration component is already installed with all necessary dependencies.\n\nFollow the instructions provided in the Integration Guide linked below.\n\n[Integration Guide >](https://frcpnt.com/csg-sentinel)""}, {""title"": ""2. Traditional Implementation"", ""description"": ""Requires the manual deployment of the integration component inside a clean Linux machine.\n\nFollow the instructions provided in the Integration Guide linked below.\n\n[Integration Guide >](https://frcpnt.com/csg-sentinel)""}]}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. 
Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF).""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG/Data%20Connectors/template_ForcepointCloudSecurityGatewayAMA.json","true" +"ForcepointDLPEvents_CL","Forcepoint DLP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20DLP","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-dlp","2022-05-09","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","Forcepoint_DLP","Forcepoint","Forcepoint DLP","The Forcepoint DLP (Data Loss Prevention) connector allows you to automatically export DLP incident data from Forcepoint DLP into Microsoft Sentinel in real-time. 
This enriches visibility into user activities and data loss incidents, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","[{""title"": """", ""description"": ""Follow step by step instructions in the [Forcepoint DLP documentation for Microsoft Sentinel](https://frcpnt.com/dlp-sentinel) to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20DLP/Data%20Connectors/Forcepoint%20DLP.json","true" +"CommonSecurityLog","Forcepoint NGFW","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-ngfw","2022-05-25","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointNgfw","Forcepoint","[Deprecated] Forcepoint NGFW via Legacy Agent","The Forcepoint NGFW (Next Generation Firewall) connector allows you to automatically export user-defined Forcepoint NGFW logs into Microsoft Sentinel in real-time. This enriches visibility into user activities recorded by NGFW, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python - version \n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}, {""title"": ""5. Forcepoint integration installation guide "", ""description"": ""To complete the installation of this Forcepoint product integration, follow the guide linked below.\n\n[Installation Guide >](https://frcpnt.com/ngfw-sentinel)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW/Data%20Connectors/FORCEPOINT_NGFW.json","true" +"CommonSecurityLog","Forcepoint NGFW","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-ngfw","2022-05-25","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointNgfwAma","Forcepoint","[Deprecated] Forcepoint NGFW via AMA","The Forcepoint NGFW (Next Generation Firewall) connector allows you to automatically export user-defined Forcepoint NGFW logs into Microsoft Sentinel in real-time. This enriches visibility into user activities recorded by NGFW, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine.""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}, {""title"": ""3. Forcepoint integration installation guide "", ""description"": ""To complete the installation of this Forcepoint product integration, follow the guide linked below.\n\n[Installation Guide >](https://frcpnt.com/ngfw-sentinel)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW/Data%20Connectors/template_FORCEPOINT_NGFWAMA.json","true" +"Syslog","Forescout (Legacy)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20%28Legacy%29","azuresentinel","azure-sentinel-solution-forescout","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Forescout","Forescout","Forescout","The [Forescout](https://www.forescout.com/) data connector provides the capability to ingest [Forescout events](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.How-to-Work-with-the-Syslog-Plugin.html) into Microsoft Sentinel. Refer to [Forescout documentation](https://docs.forescout.com/bundle/syslog-msg-3-6-tn/page/syslog-msg-3-6-tn.About-Syslog-Messages-in-Forescout.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ForescoutEvent**](https://aka.ms/sentinel-forescout-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Forescout Syslog Plugin version: v3.6"", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the Forescout logs are forwarded.\n\n> Logs from Forescout Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. 
Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure Forescout event forwarding"", ""description"": ""Follow the configuration steps below to get Forescout logs into Microsoft Sentinel.\n1. [Select an Appliance to Configure.](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.Select-an-Appliance-to-Configure.html)\n2. [Follow these instructions](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.Send-Events-To-Tab.html#pID0E0CE0HA) to forward alerts from the Forescout platform to a syslog server.\n3. [Configure](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.Syslog-Triggers.html) the settings in the Syslog Triggers tab.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20%28Legacy%29/Data%20Connectors/Forescout_syslog.json","true" +"ForescoutOtAlert_CL","Forescout eyeInspect for OT Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security","forescout","azure-sentinel-eyeinspectotsecurity","2025-07-10","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","Forescout_eyeInspect_for_OT_Security","Forescout","Forescout eyeInspect for OT Security","Forescout eyeInspect for OT Security connector allows you to connect Asset/Alert information from Forescout eyeInspect OT platform with Microsoft Sentinel, to view and analyze data using Log Analytics Tables and Workbooks. 
This gives you more insight into OT organization network and improves security operation capabilities.","[{""title"": ""Forescout eyeInspect OT Microsoft Sentinel Integration"", ""description"": ""Instructions on how to configure Forescout eyeInspect Microsoft Sentinel Integration are provided at Forescout eyeInspect Documentation Portal"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security/Data%20Connectors/Forescout%20eyeInspect%20for%20OT%20Security.json","true" +"ForescoutOtAsset_CL","Forescout eyeInspect for OT Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security","forescout","azure-sentinel-eyeinspectotsecurity","2025-07-10","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","Forescout_eyeInspect_for_OT_Security","Forescout","Forescout eyeInspect for OT Security","Forescout eyeInspect for OT Security connector allows you to connect Asset/Alert information from Forescout eyeInspect OT platform with Microsoft Sentinel, to view and analyze data using Log Analytics Tables and Workbooks. 
This gives you more insight into OT organization network and improves security operation capabilities.","[{""title"": ""Forescout eyeInspect OT Microsoft Sentinel Integration"", ""description"": ""Instructions on how to configure Forescout eyeInspect Microsoft Sentinel Integration are provided at Forescout eyeInspect Documentation Portal"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security/Data%20Connectors/Forescout%20eyeInspect%20for%20OT%20Security.json","true" +"ForescoutComplianceStatus_CL","ForescoutHostPropertyMonitor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor","forescout","azure-sentinel-solution-forescout","2022-06-28","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","ForescoutHostPropertyMonitor","Forescout","Forescout Host Property Monitor","The Forescout Host Property Monitor connector allows you to connect host/policy/compliance properties from Forescout platform with Microsoft Sentinel, to view, create custom incidents, and improve investigation. 
This gives you more insight into your organization network and improves your security operation capabilities.","[{""title"": """", ""description"": ""Instructions on how to configure Forescout Microsoft Sentinel plugin are provided at Forescout Documentation Portal (https://docs.forescout.com/bundle/microsoft-sentinel-module-v2-0-0-h)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Forescout Plugin requirement"", ""description"": ""Please make sure Forescout Microsoft Sentinel plugin is running on Forescout platform""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor/Data%20Connectors/ForescoutHostPropertyMonitor.json","true" +"ForescoutHostProperties_CL","ForescoutHostPropertyMonitor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor","forescout","azure-sentinel-solution-forescout","2022-06-28","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","ForescoutHostPropertyMonitor","Forescout","Forescout Host Property Monitor","The Forescout Host Property Monitor connector allows you to connect host/policy/compliance properties from Forescout platform with Microsoft Sentinel, to view, create custom incidents, and improve investigation. 
This gives you more insight into your organization network and improves your security operation capabilities.","[{""title"": """", ""description"": ""Instructions on how to configure Forescout Microsoft Sentinel plugin are provided at Forescout Documentation Portal (https://docs.forescout.com/bundle/microsoft-sentinel-module-v2-0-0-h)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Forescout Plugin requirement"", ""description"": ""Please make sure Forescout Microsoft Sentinel plugin is running on Forescout platform""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor/Data%20Connectors/ForescoutHostPropertyMonitor.json","true" +"ForescoutPolicyStatus_CL","ForescoutHostPropertyMonitor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor","forescout","azure-sentinel-solution-forescout","2022-06-28","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","ForescoutHostPropertyMonitor","Forescout","Forescout Host Property Monitor","The Forescout Host Property Monitor connector allows you to connect host/policy/compliance properties from Forescout platform with Microsoft Sentinel, to view, create custom incidents, and improve investigation. 
This gives you more insight into your organization network and improves your security operation capabilities.","[{""title"": """", ""description"": ""Instructions on how to configure Forescout Microsoft Sentinel plugin are provided at Forescout Documentation Portal (https://docs.forescout.com/bundle/microsoft-sentinel-module-v2-0-0-h)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Forescout Plugin requirement"", ""description"": ""Please make sure Forescout Microsoft Sentinel plugin is running on Forescout platform""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor/Data%20Connectors/ForescoutHostPropertyMonitor.json","true" +"CommonSecurityLog","ForgeRock Common Audit for CEF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForgeRock%20Common%20Audit%20for%20CEF","publisherid_test","offerid_test","2022-05-04","","","Forgerock","Partner","https://www.forgerock.com/support","","domains","ForgeRock","ForgeRock Inc","[Deprecated] ForgeRock Identity Platform","The ForgeRock Identity Platform provides a single common auditing framework. Extract and aggregate log data across the entire platform with common audit (CAUD) event handlers and unique IDs so that it can be tracked holistically. Open and extensible, you can leverage audit logging and reporting capabilities for integration with Microsoft Sentinel via this CAUD for CEF connector.","[{""title"": ""Configuration for the ForgeRock Common Audit (CAUD) for Microsoft Sentinel"", ""description"": ""In ForgeRock, install and configure this Common Audit (CAUD) for Microsoft Sentinel per the documentation at https://github.com/javaservlets/SentinelAuditEventHandler. Next, in Azure, follow the below CEF steps.""}, {""title"": ""\n\n\n1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForgeRock%20Common%20Audit%20for%20CEF/Data%20Connectors/ForgeRock_CEF.json","true" +"CommonSecurityLog","Fortinet FortiGate Next-Generation Firewall connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortinetfortigate","2021-08-13","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Fortinet","Fortinet","[Deprecated] Fortinet via Legacy Agent","The Fortinet firewall connector allows you to easily connect your Fortinet logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py &&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Fortinet logs to Syslog agent"", ""description"": ""Set your Fortinet to send Syslog messages in CEF format to the proxy machine. 
Make sure you to send the logs to port 514 TCP on the machine\u2019s IP address.\n\n\nCopy the CLI commands below and:\n- Replace \""server <ip address>\"" with the Syslog agent's IP address.\n- Set the \""<facility_name>\"" to use the facility you configured in the Syslog agent (by default, the agent sets this to local4).\n- Set the Syslog port to 514, the port your agent uses.\n- To enable CEF format in early FortiOS versions, you may need to run the command \""set csv disable\"".\n\nFor more information, go to the [Fortinet Document Library](https://aka.ms/asi-syslog-fortinet-fortinetdocumentlibrary), choose your version, and use the \""Handbook\"" and \""Log Message Reference\"" PDFs.\n\n[Learn more >](https://aka.ms/CEF-Fortinet)"", ""instructions"": [{""parameters"": {""label"": ""Set up the connection using the CLI to run the following commands:"", ""value"": ""config log syslogd setting\n set status enable\nset format cef\nset port 514\nset server \nend"", ""rows"": 8}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py &&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/Fortinet-FortiGate.json","true" +"CommonSecurityLog","Fortinet FortiGate Next-Generation Firewall connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortinetfortigate","2021-08-13","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","FortinetAma","Fortinet","[Deprecated] Fortinet via AMA","The Fortinet firewall connector allows you to easily connect your Fortinet logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. 
Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Fortinet logs to Syslog agent"", ""description"": ""Set your Fortinet to send Syslog messages in CEF format to the proxy machine. 
Make sure you to send the logs to port 514 TCP on the machine\u2019s IP address.\n\n\nCopy the CLI commands below and:\n- Replace \""server <ip address>\"" with the Syslog agent's IP address.\n- Set the \""<facility_name>\"" to use the facility you configured in the Syslog agent (by default, the agent sets this to local4).\n- Set the Syslog port to 514, the port your agent uses.\n- To enable CEF format in early FortiOS versions, you may need to run the command \""set csv disable\"".\n\nFor more information, go to the [Fortinet Document Library](https://aka.ms/asi-syslog-fortinet-fortinetdocumentlibrary), choose your version, and use the \""Handbook\"" and \""Log Message Reference\"" PDFs.\n\n[Learn more >](https://aka.ms/CEF-Fortinet)"", ""instructions"": [{""parameters"": {""label"": ""Set up the connection using the CLI to run the following commands:"", ""value"": ""config log syslogd setting\n set status enable\nset format cef\nset port 514\nset server \nend"", ""rows"": 8}, ""type"": ""CopyableLabel""}]}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/template_Fortinet-FortiGateAma.json","true" +"FncEventsDetections_CL","Fortinet FortiNDR Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud","fortinet","fortindrcloud-sentinel","2024-01-15","","","Fortinet","Partner","https://www.fortinet.com/support","","domains","FortinetFortiNdrCloudDataConnector","Fortinet","Fortinet FortiNDR Cloud","The Fortinet FortiNDR Cloud data connector provides the capability to ingest [Fortinet FortiNDR Cloud](https://docs.fortinet.com/product/fortindr-cloud) data into Microsoft Sentinel using the FortiNDR Cloud API","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the FortiNDR Cloud API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Fortinet%20FortiNDR%20Cloud/Parsers/Fortinet_FortiNDR_Cloud.md) to create the Kusto function alias **Fortinet_FortiNDR_Cloud**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Fortinet FortiNDR Cloud Logs Collection**\n\nThe provider should provide or link to detailed steps to configure the 'PROVIDER NAME APPLICATION NAME' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Fortinet FortiNDR Cloud connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the as well as the FortiNDR Cloud API credentials (available in FortiNDR Cloud account management), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Fortinet FortiNDR Cloud connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-FortinetFortiNDR-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location**(Make sure using the same location as your Resource Group, and got the location supports Flex Consumption. \n3. Enter the **Workspace ID**, **Workspace Key**, **AwsAccessKeyId**, **AwsSecretAccessKey**, and/or Other required fields. \n4. Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""MetaStream Credentials"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **FortiNDR Cloud Account Code** are required to retrieve event data.""}, {""name"": ""API Credentials"", ""description"": ""**FortiNDR Cloud API Token**, **FortiNDR Cloud Account UUID** are required to retrieve detection data.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud/Data%20Connectors/FortinetFortiNdrCloud_API_AzureFunctionApp.json","true" +"FncEventsObservation_CL","Fortinet FortiNDR Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud","fortinet","fortindrcloud-sentinel","2024-01-15","","","Fortinet","Partner","https://www.fortinet.com/support","","domains","FortinetFortiNdrCloudDataConnector","Fortinet","Fortinet FortiNDR Cloud","The Fortinet FortiNDR Cloud data connector provides the capability to ingest [Fortinet FortiNDR Cloud](https://docs.fortinet.com/product/fortindr-cloud) data into Microsoft Sentinel using the FortiNDR Cloud API","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the FortiNDR Cloud API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Fortinet%20FortiNDR%20Cloud/Parsers/Fortinet_FortiNDR_Cloud.md) to create the Kusto function alias **Fortinet_FortiNDR_Cloud**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Fortinet FortiNDR Cloud Logs Collection**\n\nThe provider should provide or link to detailed steps to configure the 'PROVIDER NAME APPLICATION NAME' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Fortinet FortiNDR Cloud connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the as well as the FortiNDR Cloud API credentials (available in FortiNDR Cloud account management), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Fortinet FortiNDR Cloud connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-FortinetFortiNDR-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location**(Make sure using the same location as your Resource Group, and got the location supports Flex Consumption. \n3. Enter the **Workspace ID**, **Workspace Key**, **AwsAccessKeyId**, **AwsSecretAccessKey**, and/or Other required fields. \n4. Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""MetaStream Credentials"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **FortiNDR Cloud Account Code** are required to retrieve event data.""}, {""name"": ""API Credentials"", ""description"": ""**FortiNDR Cloud API Token**, **FortiNDR Cloud Account UUID** are required to retrieve detection data.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud/Data%20Connectors/FortinetFortiNdrCloud_API_AzureFunctionApp.json","true" +"FncEventsSuricata_CL","Fortinet FortiNDR Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud","fortinet","fortindrcloud-sentinel","2024-01-15","","","Fortinet","Partner","https://www.fortinet.com/support","","domains","FortinetFortiNdrCloudDataConnector","Fortinet","Fortinet FortiNDR Cloud","The Fortinet FortiNDR Cloud data connector provides the capability to ingest [Fortinet FortiNDR Cloud](https://docs.fortinet.com/product/fortindr-cloud) data into Microsoft Sentinel using the FortiNDR Cloud API","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the FortiNDR Cloud API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Fortinet%20FortiNDR%20Cloud/Parsers/Fortinet_FortiNDR_Cloud.md) to create the Kusto function alias **Fortinet_FortiNDR_Cloud**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Fortinet FortiNDR Cloud Logs Collection**\n\nThe provider should provide or link to detailed steps to configure the 'PROVIDER NAME APPLICATION NAME' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Fortinet FortiNDR Cloud connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the as well as the FortiNDR Cloud API credentials (available in FortiNDR Cloud account management), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Fortinet FortiNDR Cloud connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-FortinetFortiNDR-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location**(Make sure using the same location as your Resource Group, and got the location supports Flex Consumption. \n3. Enter the **Workspace ID**, **Workspace Key**, **AwsAccessKeyId**, **AwsSecretAccessKey**, and/or Other required fields. \n4. Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""MetaStream Credentials"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **FortiNDR Cloud Account Code** are required to retrieve event data.""}, {""name"": ""API Credentials"", ""description"": ""**FortiNDR Cloud API Token**, **FortiNDR Cloud Account UUID** are required to retrieve detection data.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud/Data%20Connectors/FortinetFortiNdrCloud_API_AzureFunctionApp.json","true" +"CommonSecurityLog","Fortinet FortiWeb Cloud WAF-as-a-Service connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortiwebcloud","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","FortinetFortiWeb","Microsoft","[Deprecated] Fortinet FortiWeb Web Application Firewall via Legacy Agent","The [fortiweb](https://www.fortinet.com/products/web-application-firewall/fortiweb) data connector provides the capability to ingest Threat Analytics and events into Microsoft Sentinel.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/Fortiweb.json","true" +"CommonSecurityLog","Fortinet FortiWeb Cloud WAF-as-a-Service connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortiwebcloud","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","FortinetFortiWebAma","Microsoft","Fortinet FortiWeb Web Application Firewall via AMA","The [fortiweb](https://www.fortinet.com/products/web-application-firewall/fortiweb) data connector provides the capability to ingest Threat Analytics and events into Microsoft Sentinel.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/template_FortiwebAma.json","true" +"","GDPR Compliance & Data Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GDPR%20Compliance%20%26%20Data%20Security","azuresentinel","azure-sentinel-solution-gdpr-compliance","2025-10-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Garrison_ULTRARemoteLogs_CL","Garrison ULTRA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Garrison%20ULTRA","garrisontechnologyltd1725375696148","microsoft-sentinel-solution-garrison-ultra","2024-10-04","","","Garrison","Partner","https://support.ultra.garrison.com","","domains","GarrisonULTRARemoteLogs","Garrison","Garrison ULTRA Remote Logs","The [Garrison ULTRA](https://www.garrison.com/en/garrison-ultra-cloud-platform) Remote Logs connector allows you to ingest Garrison ULTRA Remote Logs into Microsoft Sentinel.","[{""title"": ""Deployment - Azure Resource Manager (ARM) Template"", ""description"": ""These steps outline the automated deployment of the Garrison ULTRA Remote Logs data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Frefs%2Fheads%2Fmaster%2FSolutions%2FGarrison%2520ULTRA%2FData%2520Connectors%2FGarrisonULTRARemoteLogs%2Fazuredeploy_DataCollectionResources.json) \t\t\t\n2. Provide the required details such as Resource Group, Microsoft Sentinel Workspace and ingestion configurations \n> **NOTE:** It is recommended to create a new Resource Group for deployment of these resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Garrison ULTRA"", ""description"": ""To use this data connector you must have an active [Garrison ULTRA](https://www.garrison.com/en/garrison-ultra-cloud-platform) license.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Garrison%20ULTRA/Data%20Connectors/GarrisonULTRARemoteLogs/GarrisonULTRARemoteLogs_ConnectorUI.json","true" +"Gigamon_CL","Gigamon Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Gigamon%20Connector","gigamon-inc","microsoft-sentinel-solution-gigamon","","","","Gigamon","Partner","https://www.gigamon.com/","","domains","GigamonDataConnector","Gigamon","Gigamon AMX Data Connector","Use this data connector to integrate with Gigamon Application Metadata Exporter (AMX) and get data sent directly to Microsoft Sentinel. ","[{""title"": ""Gigamon Data Connector"", ""description"": ""1. Application Metadata Exporter (AMX) application converts the output from the Application Metadata Intelligence (AMI) in CEF format into JSON format and sends it to the cloud tools and Kafka.\n 2. The AMX application can be deployed only on a V Series Node and can be connected to Application Metadata Intelligence running on a physical node or a virtual machine.\n 3. The AMX application and the AMI are managed by GigaVUE-FM. 
This application is supported on VMware ESXi, VMware NSX-T, AWS and Azure.\n "", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Gigamon%20Connector/Data%20Connectors/Connector_Analytics_Gigamon.json","true" +"GitHubAuditLogsV2_CL","GitHub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub","microsoftcorporation1622712991604","sentinel4github","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitHubAuditDefinitionV2","Microsoft","GitHub Enterprise Audit Log (via Codeless Connector Framework) (Preview)","The GitHub audit log connector provides the capability to ingest GitHub logs into Microsoft Sentinel. By connecting GitHub audit logs into Microsoft Sentinel, you can view this data in workbooks, use it to create custom alerts, and improve your investigation process.

**Note:** If you intended to ingest GitHub subscribed events into Microsoft Sentinel, please refer to GitHub (using Webhooks) Connector from ""**Data Connectors**"" gallery.","[{""title"": ""Connect the GitHub Enterprise-level Audit Log to Microsoft Sentinel"", ""description"": ""Enable GitHub audit logs. \n Follow [this guide](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic) to create or find your personal access token."", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Github Enterprise API URL"", ""columnValue"": ""properties.addOnAttributes.ApiUrl""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Enterprise"", ""title"": ""Add Enterprise"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""parameters"": {""content"": ""Enter your Github Enterprise API URL and API key. 
Github Enterprise API URL formats:\n* `https://api.github.com/enterprises/{enterprise}`\n* `https://api.{subdomain}.ghe.com/enterprises/{enterprise}`""}, ""type"": ""Markdown""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Github Enterprise API URL"", ""placeholder"": ""Your Github Enterprise API URL"", ""type"": ""text"", ""name"": ""ApiUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter API Key"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""GitHub API personal access token"", ""description"": ""To enable polling for the Enterprise audit log, ensure the authenticated user is an Enterprise admin and has a GitHub personal access token (classic) with the `read:audit_log` scope.""}, {""name"": ""GitHub Enterprise type"", ""description"": ""This connector will only function with GitHub Enterprise Cloud; it will not support GitHub Enterprise Server.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub/Data%20Connectors/GitHubAuditLogs_CCF/GitHubAuditLogs_ConnectorDefinition.json","true" +"GitHubAuditLogPolling_CL","GitHub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub","microsoftcorporation1622712991604","sentinel4github","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitHubEcAuditLogPolling","GitHub","[Deprecated] GitHub Enterprise Audit Log","The GitHub audit log connector provides the capability to ingest GitHub logs into Microsoft Sentinel. 
By connecting GitHub audit logs into Microsoft Sentinel, you can view this data in workbooks, use it to create custom alerts, and improve your investigation process.

**Note:** If you intend to ingest GitHub subscribed events into Microsoft Sentinel, please refer to GitHub (using Webhooks) Connector from ""**Data Connectors**"" gallery.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": ""Connect the GitHub Enterprise Organization-level Audit Log to Microsoft Sentinel"", ""description"": ""Enable GitHub audit logs. \n Follow [this guide](https://docs.github.com/en/github/authenticating-to-github/keeping-your-account-and-data-secure/creating-a-personal-access-token) to create or find your personal access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Organization Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{placeHolder1}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""GitHub API personal access token"", ""description"": ""You need a GitHub personal access token to enable polling for the organization audit log. You may use either a classic token with 'read:org' scope OR a fine-grained token with 'Administration: Read-only' scope.""}, {""name"": ""GitHub Enterprise type"", ""description"": ""This connector will only function with GitHub Enterprise Cloud; it will not support GitHub Enterprise Server. 
""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub/Data%20Connectors/azuredeploy_GitHub_native_poller_connector.json","true" +"githubscanaudit_CL","GitHub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub","microsoftcorporation1622712991604","sentinel4github","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitHubWebhook","Microsoft","GitHub (using Webhooks)","The [GitHub](https://www.github.com) webhook data connector provides the capability to ingest GitHub subscribed events into Microsoft Sentinel using [GitHub webhook events](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads). The connector provides ability to get events into Microsoft Sentinel which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

**Note:** If you intend to ingest Github Audit logs, please refer to GitHub Enterprise Audit Log Connector from ""**Data Connectors**"" gallery.","[{""title"": """", ""description"": "">**NOTE:** This connector has been built on http trigger based Azure Function. And it provides an endpoint to which github will be connected through its webhook capability and posts the subscribed events into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Github Webhook connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the GitHub data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GitHubwebhookAPI-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region and deploy. \n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the GitHub webhook data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-GitHubWebhookAPI-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration. \n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional) - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""**Post Deployment steps**\n\n""}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""STEP 1 - To get the Azure Function url"", ""description"": "" 1. Go to Azure function Overview page and Click on \""Functions\"" in the left blade.\n 2. Click on the function called \""GithubwebhookConnector\"".\n 3. Go to \""GetFunctionurl\"" and copy the function url.""}, {""title"": ""STEP 2 - Configure Webhook to Github Organization"", ""description"": ""1. Go to [GitHub](https://www.github.com) and open your account and click on \""Your Organizations.\""\n 2. Click on Settings.\n 3. Click on \""Webhooks\"" and enter the function app url which was copied from above STEP 1 under payload URL textbox. \n 4. Choose content type as \""application/json\"". \n 5. Subscribe for events and Click on \""Add Webhook\""""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""*Now we are done with the github Webhook configuration. 
Once the github events triggered and after the delay of 20 to 30 mins (As there will be a delay for LogAnalytics to spin up the resources for the first time), you should be able to see all the transactional events from the Github into LogAnalytics workspace table called \""githubscanaudit_CL\"".*\n\n For more details, Click [here](https://aka.ms/sentinel-gitHubwebhooksteps)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub/Data%20Connectors/GithubWebhook/GithubWebhook_API_FunctionApp.json","true" +"Syslog","GitLab","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitLab","azuresentinel","azure-sentinel-solution-gitlab","2022-04-27","2022-06-27","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitLab","Microsoft","[Deprecated] GitLab","The [GitLab](https://about.gitlab.com/solutions/devops-platform/) connector allows you to easily connect your GitLab (GitLab Enterprise Edition - Standalone) logs with Microsoft Sentinel. This gives you more security insight into your organization's DevOps pipelines.","[{""title"": ""Configuration"", ""description"": "">This data connector depends on three parsers based on a Kusto Function to work as expected [**GitLab Access Logs**](https://aka.ms/sentinel-GitLabAccess-parser), [**GitLab Audit Logs**](https://aka.ms/sentinel-GitLabAudit-parser) and [**GitLab Application Logs**](https://aka.ms/sentinel-GitLabApp-parser) which are deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitLab/Data%20Connectors/Connector_Syslog_GitLab.json","true" +"","Global Secure Access","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Global%20Secure%20Access","azuresentinel","azure-sentinel-solution-globalsecureaccess","2024-04-08","","","Microsoft Corporation","Microsoft","https://learn.microsoft.com/en-us/entra/global-secure-access/overview-what-is-global-secure-access","","domains","","","","","","","","false" +"ApigeeX_CL","Google Apigee","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee","azuresentinel","azure-sentinel-solution-googleapigeex","2021-10-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ApigeeXDataConnector","Google","[DEPRECATED] Google ApigeeX","The [Google ApigeeX](https://cloud.google.com/apigee/docs) data connector provides the capability to ingest ApigeeX audit logs into Microsoft Sentinel using the GCP Logging API. Refer to [GCP Logging API documentation](https://cloud.google.com/logging/docs/reference/v2/rest) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ApigeeX**](https://aka.ms/sentinel-ApigeeXDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuring GCP and obtaining credentials**\n\n1. Make sure that Logging API is [enabled](https://cloud.google.com/apis/docs/getting-started#enabling_apis). \n\n2. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions) and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).\n\n3. 
Prepare GCP project ID where ApigeeX is located.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ApigeeXDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Google Cloud Platform Project Id**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ApigeeXDataConnector-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tRESOURCE_NAMES\n\t\tCREDENTIALS_FILE_CONTENT\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""GCP service account"", ""description"": ""GCP service account with permissions to read logs is required for GCP Logging API. Also json file with service account key is required. 
See the documentation to learn more about [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions), [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/ApigeeX_FunctionApp.json","true" +"GCPApigee","Google Apigee","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee","azuresentinel","azure-sentinel-solution-googleapigeex","2021-10-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GoogleApigeeXLogsCCPDefinition","Microsoft","Google ApigeeX (via Codeless Connector Framework)","The Google ApigeeX data connector provides the capability to ingest Audit logs into Microsoft Sentinel using the Google Apigee API. Refer to [Google Apigee API](https://cloud.google.com/apigee/docs/reference/apis/apigee/rest/?apix=true) documentation for more information.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/ApigeeXReadme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPApigeeLogSetup)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/ApigeeXReadme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPApigeeLogSetup)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": 
{""content"": ""#### 2. Enable ApigeeX logs \n In the Google Cloud Console, enable Apigee API, if not enabled previously, and save the changes.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable ApigeeX Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect Google ApigeeX to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/GoogleApigeeXLog_CCP/GoogleApigeeXLog_ConnectorDefinition.json","true" +"GCPAuditLogs","Google Cloud Platform Audit Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Audit%20Logs","azuresentinel","azure-sentinel-solution-gcpauditlogs-api","2023-03-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPAuditLogsDefinition","Microsoft","GCP Pub/Sub Audit Logs","The Google Cloud Platform (GCP) audit logs, ingested from Microsoft Sentinel's connector, enables you to capture three types of audit logs: admin activity logs, data access logs, and access transparency logs. Google cloud audit logs record a trail that practitioners can use to monitor access and detect potential threats across Google Cloud Platform (GCP) resources.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. [Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation)."", ""govScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. [Link to Gov Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov).""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable GCP Audit Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Audit%20Logs/Data%20Connectors/GCPAuditLogs_ccp/data_connector_definition.json","true" +"","Google Cloud Platform BigQuery","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20BigQuery","azuresentinel","azure-sentinel-solution-gcpbigquery","2023-03-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"GCPMonitoring","Google Cloud Platform Cloud Monitoring","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring","azuresentinel","azure-sentinel-solution-gcpmonitoring","2022-07-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPMonitorCCPDefinition","Microsoft","Google Cloud Platform Cloud Monitoring (via Codeless Connector Framework)","The Google Cloud Platform Cloud Monitoring data connector ingests Monitoring logs from Google Cloud into Microsoft Sentinel using the Google Cloud Monitoring API. Refer to [Cloud Monitoring API](https://cloud.google.com/monitoring/api/v3) documentation for more details.","[{""title"": ""Connect Google Cloud Platform Cloud Monitoring to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
Setup GCP Monitoring Integration\n To fetch logs from GCP Cloud Monitoring to Sentinel **Project ID** of Google cloud is required.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Choose the **Metric Type**\n To collect logs from Google Cloud Monitoring provide the required Metric type.\n\nFor more details, refer to [Google Cloud Metrics](https://cloud.google.com/monitoring/api/metrics_gcp).""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. OAuth Credentials\n To Fetch Oauth client id and client secret refer to this [documentation](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring/Data%20Connectors/Readme.md).""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 4. Connect to Sentinel\n Click on **Connect** to start pulling monitoring logs from Google Cloud into Microsoft Sentinel.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""GCP Project ID"", ""name"": ""projectid"", ""required"": true, ""description"": ""Enter your Google Cloud Project ID.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Metric Type"", ""type"": ""text"", ""name"": ""metrictype"", ""required"": true, ""description"": ""Provide the metric types you want to collect logs for with comma separated. For example: compute.googleapis.com/instance/disk/write_bytes_count,compute.googleapis.com/instance/uptime_total""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Metric Type"", ""columnValue"": ""properties.addOnAttributes.metrictype""}, {""columnName"": ""Project ID"", ""columnValue"": ""properties.addOnAttributes.projectid""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": 
""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring/Data%20Connectors/GCPMonitoringLogs_CCP/GCPCloudMonitoringLogs_ConnectorDefinition.json","true" +"GCP_MONITORING_CL","Google Cloud Platform Cloud Monitoring","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring","azuresentinel","azure-sentinel-solution-gcpmonitoring","2022-07-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPMonitorDataConnector","Google","[DEPRECATED] Google Cloud Platform Cloud Monitoring","The Google Cloud Platform Cloud Monitoring data connector provides the capability to ingest [GCP Monitoring metrics](https://cloud.google.com/monitoring/api/metrics_gcp) into Microsoft Sentinel using the GCP Monitoring API. Refer to [GCP Monitoring API documentation](https://cloud.google.com/monitoring/api/v3) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**GCP_MONITORING**](https://aka.ms/sentinel-GCPMonitorDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuring GCP and obtaining credentials**\n\n1. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with Monitoring Viewer role and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).\n\n2. Prepare the list of GCP projects to get metrics from. [Learn more about GCP projects](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy).\n\n3. 
Prepare the list of [GCP metric types](https://cloud.google.com/monitoring/api/metrics_gcp)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GCPMonitorDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Google Cloud Platform Project Id List**, **Google Cloud Platform Metric Types List**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-GCPMonitorDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGCP_PROJECT_ID\n\t\tGCP_METRICS\n\t\tGCP_CREDENTIALS_FILE_CONTENT\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""GCP service account"", ""description"": ""GCP service account with permissions to read Cloud Monitoring metrics is required for GCP Monitoring API (required *Monitoring Viewer* role). Also json file with service account key is required. 
See the documentation to learn more about [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring/Data%20Connectors/GCP_Monitor_API_FunctionApp.json","true" +"GCPCloudRun","Google Cloud Platform Cloud Run","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run","azuresentinel","azure-sentinel-solution-gcpcloudrun","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPCloudRunLogs_ConnectorDefinition","Microsoft","GCP Cloud Run (via Codeless Connector Framework)","The GCP Cloud Run data connector provides the capability to ingest Cloud Run request logs into Microsoft Sentinel using Pub/Sub. Refer the [Cloud Run Overview](https://cloud.google.com/run/docs/logging) for more details.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudRunLogsSetup)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudRunLogsSetup)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", 
""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Cloud Run logs \n In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes.Deploy or update your Cloud Run services with logging enabled.\n\n Reference Link: [Link to documentation](https://cloud.google.com/run/docs/setup)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable GCP Cloud Run Request Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP Cloud Run to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run/Data%20Connectors/GCPCloudRunLog_CCF/GCPCloudRunLogs_ConnectorDefinition.json","true" +"GCPComputeEngine","Google Cloud Platform Compute Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine","azuresentinel","azure-sentinel-solution-gcpcomputeengine","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPComputeEngineLogsCCPDefinition","Microsoft","Google Cloud Platform Compute Engine (via Codeless Connector Framework)","The Google Cloud Platform Compute Engine data connector provides the capability to ingest Compute Engine Audit logs into Microsoft Sentinel using the Google Cloud Compute Engine API. 
Refer to [Cloud Compute Engine API](https://cloud.google.com/compute/docs/reference/rest/v1) documentation for more information.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine/Data%20Connectors/GCPComputeEngineReadme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPComputeEngineLogsSetup/GCPComputeEngineLogSetup.tf)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine/Data%20Connectors/GCPComputeEngineReadme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPComputeEngineLogsSetup/GCPComputeEngineLogSetup.tf)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Compute Engine logs \n In the Google Cloud Console, enable Compute Engine API, if not enabled previously, and save the changes.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable Compute Engine Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP Compute Engine to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine/Data%20Connectors/GCPComputeEngineLog_CCP/GCPComputeEngineLog_ConnectorDefinition.json","true" +"GCPFirewallLogs","Google Cloud Platform Firewall Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Firewall%20Logs","","","","","","","","","","","GCPFirewallLogsCCPDefinition","Microsoft","GCP Pub/Sub Firewall Logs","The Google Cloud Platform (GCP) firewall logs, enable you to capture network inbound and outbound activity to monitor access and detect potential threats across Google Cloud Platform (GCP) resources.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. 
[Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation)\n Connector tutorial: [Link to tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup) ."", ""govScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. [Link to Gov Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov)\n Connector tutorial: [Link to tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup).""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Firewall logs \nIn your GCP account, navigate to the Firewall section. Here, you can either create a new rule or edit an existing one that you want to monitor. Once you open the rule, switch the toggle button under the **Logs** section to **On**, and save the changes.\n\nFor more information: [Link to documentation](https://cloud.google.com/firewall/docs/using-firewall-rules-logging?_gl=1*1no0nhk*_ga*NDMxNDIxODI3LjE3MjUyNjUzMzc.*_ga_WH2QY8WWF5*MTcyNTUyNzc4MS4xMS4xLjE3MjU1MjgxNTIuNDYuMC4w)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCP Firewall Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Firewall%20Logs/Data%20Connectors/GCPFirewallLogs_ccp/GCP_ConnectorDefinition.json","true" +"GCPLoadBalancerLogs_CL","Google Cloud Platform Load Balancer Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Load%20Balancer%20Logs","azuresentinel","azure-sentinel-solution-gcploadbalancerlogs-api","2025-02-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPFLoadBalancerLogsCCPDefinition","Microsoft","GCP Pub/Sub Load Balancer Logs (via Codeless Connector Platform).","Google Cloud Platform (GCP) Load Balancer logs provide detailed insights into network traffic, capturing both inbound and outbound activities. These logs are used for monitoring access patterns and identifying potential security threats across GCP resources. Additionally, these logs also include GCP Web Application Firewall (WAF) logs, enhancing the ability to detect and mitigate risks effectively.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. [Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation)."", ""govScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. [Link to Gov Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov).""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""PoolId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Load Balancer logs \nIn your GCP account, navigate to the Load Balancer section. In here you can nevigate to [**Backend Service**] -> [**Edit**], once you are in the [**Backend Service**] on the [**Logging**] section **enable** the checkbox of [**Enable Logs**]. Once you open the rule, switch the toggle button under the **Logs** section to **On**, and save the changes.\n\nFor more information: [Link to documentation](https://cloud.google.com/load-balancing/docs/https/https-logging-monitoring)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCP Load Balancer Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Load%20Balancer%20Logs/Data%20Connectors/GCPFLoadBalancerLogs_GCP_CCP/GCPFLoadBalancerLogs_Definition.json","true" +"GoogleCloudSCC","Google Cloud Platform Security Command Center","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Security%20Command%20Center","azuresentinel","azure-sentinel-solution-gcpscclogs-api","2023-09-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GoogleSCCDefinition","Microsoft","Google Security Command Center","The Google Cloud Platform (GCP) Security Command Center is a comprehensive security and risk management platform for Google Cloud, ingested from Sentinel's connector. It offers features such as asset inventory and discovery, vulnerability and threat detection, and risk mitigation and remediation to help you gain insight into your organization's security and data attack surface. This integration enables you to perform tasks related to findings and assets more effectively.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. [Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation).""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""PoolId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Connect new collectors \n To enable GCP SCC for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": false, ""write"": false, ""delete"": false, ""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Security%20Command%20Center/Data%20Connectors/GCPSecurityCommandCenter.json","true" +"GCPVPCFlow","Google Cloud Platform VPC Flow Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20VPC%20Flow%20Logs","azuresentinel","azure-sentinel-solution-gcpvpcflowlogs-api","2025-02-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPVPCFlowLogsCCPDefinition","Microsoft","GCP Pub/Sub VPC Flow Logs (via Codeless Connector Framework)","The Google Cloud Platform (GCP) VPC Flow Logs enable you to capture network traffic activity at the VPC level, allowing you to monitor access patterns, analyze network performance, and detect potential threats across GCP resources.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription. \n To configure this data connector, execute the following Terraform scripts:\n 1. Setup Required Resources: [Configuration Guide](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPVPCFlowLogsSetup/readme.md)\n 2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). 
Note: If Authentication is already setup using another GCP data connector , kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription. \n To configure this data connector, execute the following Terraform scripts:\n 1. Setup Required Resources: [Configuration Guide]https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPVPCFlowLogsSetup/readme.md)\n 2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector , kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable VPC Flow Logs \nIn your GCP account, navigate to the VPC network section. Select the subnet you want to monitor and enable Flow Logs under the Logging section.\n\nFor more information: [Google Cloud Documentation](https://cloud.google.com/vpc/docs/using-flow-logs)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCP VPC Flow Logs for Microsoft Sentinel, click the Add new collector button, fill in the required information in the context pane, and click Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20VPC%20Flow%20Logs/Data%20Connectors/GCPVPCFlowLogs_GCP_CCP/GCPVPCFlowLogs_ConnectorDefinition.json","true" +"GKEAPIServer","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. 
Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. 
Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" +"GKEApplication","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs 
enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). 
Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": 
false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" +"GKEAudit","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. 
Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" +"GKEControllerManager","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": 
""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" +"GKEHPADecision","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. 
Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" +"GKEScheduler","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" +"","Google Threat 
Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Threat%20Intelligence","virustotalsl1681486227461","azure-sentinel-solution-google","2024-10-26","2024-10-26","","Google","Partner","https://www.virustotal.com/gui/contact-us","","domains","","","","","","","","false" +"GCPCDN","GoogleCloudPlatformCDN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformCDN","azuresentinel","azure-sentinel-solution-gcp-cdn","2025-03-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPCDNLogsCCPDefinition","Microsoft","Google Cloud Platform CDN (via Codeless Connector Framework)","The Google Cloud Platform CDN data connector provides the capability to ingest Cloud CDN Audit logs and Cloud CDN Traffic logs into Microsoft Sentinel using the Compute Engine API. Refer the [Product overview](https://cloud.google.com/cdn/docs/overview) document for more details.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformCDN/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCDNLogsSetup)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformCDN/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCDNLogsSetup)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable CDN logs \n In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes. Navigate to Cloud CDN section and click on Add origin to create backends as per link provided below. \n\n Reference Link: [Link to documentation](https://cloud.google.com/cdn/docs/using-cdn)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCP Cloud CDN Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP CDN to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformCDN/Data%20Connectors/GCPCDNLogs_ccp/GCPCDNLogs_ConnectorDefinition.json","true" +"GCP_DNS_CL","GoogleCloudPlatformDNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS","azuresentinel","azure-sentinel-solution-gcpdns","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPDNSDataConnector","Google","[DEPRECATED] Google Cloud Platform DNS","The Google Cloud Platform DNS data connector provides the capability to ingest [Cloud DNS query logs](https://cloud.google.com/dns/docs/monitoring#using_logging) and [Cloud DNS audit logs](https://cloud.google.com/dns/docs/audit-logging) into Microsoft Sentinel using the GCP Logging API. Refer to [GCP Logging API documentation](https://cloud.google.com/logging/docs/api) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**GCPCloudDNS**](https://aka.ms/sentinel-GCPDNSDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuring GCP and obtaining credentials**\n\n1. Make sure that Logging API is [enabled](https://cloud.google.com/apis/docs/getting-started#enabling_apis). \n\n2. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with Logs Viewer role (or at least with \""logging.logEntries.list\"" permission) and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).\n\n3. Prepare the list of GCP resources (organizations, folders, projects) to get logs from. 
[Learn more about GCP resources](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy).""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GCPDNSDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Google Cloud Platform Resource Names**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-GCPDNSDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tRESOURCE_NAMES\n\t\tCREDENTIALS_FILE_CONTENT\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""GCP service account"", ""description"": ""GCP service account with permissions to read logs (with \""logging.logEntries.list\"" permission) is required for GCP Logging API. Also json file with service account key is required. 
See the documentation to learn more about [permissions](https://cloud.google.com/logging/docs/access-control#permissions_and_roles), [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS/Data%20Connectors/GCP_DNS_API_FunctionApp.json","true" +"GCPDNS","GoogleCloudPlatformDNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS","azuresentinel","azure-sentinel-solution-gcpdns","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPDNSLogsCCPDefinition","Microsoft","Google Cloud Platform DNS (via Codeless Connector Framework)","The Google Cloud Platform DNS data connector provides the capability to ingest Cloud DNS Query logs and Cloud DNS Audit logs into Microsoft Sentinel using the Google Cloud DNS API. Refer to [Cloud DNS API](https://cloud.google.com/dns/docs/reference/rest/v1) documentation for more information.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** If both Azure Function and CCP connector are running simultaneously, duplicate data is populated in the tables.""}}, {""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPDNS_CCPLogsSetup)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPDNS_CCPLogsSetupGov)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", 
""parameters"": {""content"": ""#### 2. Enable DNS logs \n In the Google Cloud Console, navigate to Cloud DNS Section. Enable cloud logging if not enabled previously, and save the changes. Here, you can manage the existing zones, or create a new zone and create policies for the zone which you want to monitor.\n\nFor more information: [Link to documentation](https://cloud.google.com/dns/docs/zones/zones-overview)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable GCP DNS Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP DNS to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS/Data%20Connectors/GCPDNSLog_CCP/GCPDNSLog_ConnectorDefinition.json","true" +"GCPIAM","GoogleCloudPlatformIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM","azuresentinel","azure-sentinel-solution-gcpiam","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPIAMCCPDefinition","Microsoft","Google Cloud Platform IAM (via Codeless Connector Framework)","The Google Cloud Platform IAM data connector provides the capability to ingest the Audit logs relating to Identity and Access Management (IAM) activities within Google Cloud into Microsoft Sentinel using the Google IAM API. 
Refer to [GCP IAM API](https://cloud.google.com/iam/docs/reference/rest) documentation for more information.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** If both Azure Function and CCF connector are running in parallel, duplicate data is populated in the tables.""}}, {""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPIAMCCPLogsSetup)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPIAMCCPLogsSetup)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. To enable IAM logs \n In your GCP account, navigate to the IAM section. From there, you can either create a new user or modify an existing user's role that you want to monitor. Be sure to save your changes.\n\nFor more information: [Link to documentation](https://cloud.google.com/assured-workloads/docs/iam-roles?hl=en)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCPIAM Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP IAM to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM/Data%20Connectors/GCPIAMLog_CCP/GCPIAMLog_ConnectorDefinition.json","true" +"GCP_IAM_CL","GoogleCloudPlatformIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM","azuresentinel","azure-sentinel-solution-gcpiam","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPIAMDataConnector","Google","[DEPRECATED] Google Cloud Platform IAM","The Google Cloud Platform Identity and Access Management (IAM) data connector provides the capability to ingest [GCP IAM logs](https://cloud.google.com/iam/docs/audit-logging) into Microsoft Sentinel using the GCP Logging API. Refer to [GCP Logging API documentation](https://cloud.google.com/logging/docs/api) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**GCP_IAM**](https://aka.ms/sentinel-GCPIAMDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuring GCP and obtaining credentials**\n\n1. Make sure that Logging API is [enabled](https://cloud.google.com/apis/docs/getting-started#enabling_apis). \n\n2. (Optional) [Enable Data Access Audit logs](https://cloud.google.com/logging/docs/audit/configure-data-access#config-console-enable).\n\n3. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions) and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).\n\n4. Prepare the list of GCP resources (organizations, folders, projects) to get logs from. 
[Learn more about GCP resources](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy).""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GCPIAMDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Google Cloud Platform Resource Names**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-GCPIAMDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tRESOURCE_NAMES\n\t\tCREDENTIALS_FILE_CONTENT\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""GCP service account"", ""description"": ""GCP service account with permissions to read logs is required for GCP Logging API. Also json file with service account key is required. 
See the documentation to learn more about [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions), [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM/Data%20Connectors/GCP_IAM_API_FunctionApp.json","true" +"GCPIDS","GoogleCloudPlatformIDS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIDS","azuresentinel","azure-sentinel-solution-gcpids","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPCLOUDIDSLogsCCPDefinition","Microsoft","Google Cloud Platform Cloud IDS (via Codeless Connector Framework)","The Google Cloud Platform IDS data connector provides the capability to ingest Cloud IDS Traffic logs, Threat logs and Audit logs into Microsoft Sentinel using the Google Cloud IDS API. Refer to [Cloud IDS API](https://cloud.google.com/intrusion-detection-system/docs/audit-logging#google.cloud.ids.v1.IDS) documentation for more information.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIDS/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudIDSLogSetup)\n & the Authentication set up script: [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIDS/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script: [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudIDSLogSetup)\n & the Authentication set up script: [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": 
""Markdown"", ""parameters"": {""content"": ""#### 2. Enable IDS logs \n In the Google Cloud Console, enable Cloud IDS API, if not enabled previously. Create an IDS Endpoint and save the changes.\n\nFor more information on how to create and configure an IDS endpoint: [Link to documentation](https://cloud.google.com/intrusion-detection-system/docs/configuring-ids)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable GCP IDS Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP Cloud IDS to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIDS/Data%20Connectors/GCPCloudIDSLog_CCP/GCPCloudIDSLog_ConnectorDefinition.json","true" +"GCPNAT","GoogleCloudPlatformNAT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT","azuresentinel","azure-sentinel-solution-gcp-nat","2025-05-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPNATLogsCCPDefinition","Microsoft","Google Cloud Platform NAT (via Codeless Connector Framework)","The Google Cloud Platform NAT data connector provides the capability to ingest Cloud NAT Audit logs and Cloud NAT Traffic logs into Microsoft Sentinel using the Compute Engine API. 
Refer the [Product overview](https://cloud.google.com/nat/docs/overview) document for more details.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudNATLogsSetup/GCPCloudNATLogsSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudNATLogsSetup/GCPCloudNATLogsSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable NAT logs \n In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes. Navigate to Cloud NAT section and click on Add origin to create backends as per link provided below. \n\n Reference Link: [Link to documentation](https://cloud.google.com/nat/docs/monitoring)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCP Cloud NAT Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP NAT to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/GCPNATLogs_ccp/GCPNATLogs_ConnectorDefinition.json","true" +"GCPNATAudit","GoogleCloudPlatformNAT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT","azuresentinel","azure-sentinel-solution-gcp-nat","2025-05-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPNATLogsCCPDefinition","Microsoft","Google Cloud Platform NAT (via Codeless Connector Framework)","The Google Cloud Platform NAT data connector provides the capability to ingest Cloud NAT Audit logs and Cloud NAT Traffic logs into Microsoft Sentinel using the Compute Engine API. Refer the [Product overview](https://cloud.google.com/nat/docs/overview) document for more details.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudNATLogsSetup/GCPCloudNATLogsSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudNATLogsSetup/GCPCloudNATLogsSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": 
""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable NAT logs \n In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes. Navigate to Cloud NAT section and click on Add origin to create backends as per link provided below. \n\n Reference Link: [Link to documentation](https://cloud.google.com/nat/docs/monitoring)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable GCP Cloud NAT Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP NAT to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/GCPNATLogs_ccp/GCPNATLogs_ConnectorDefinition.json","true" +"GCPResourceManager","GoogleCloudPlatformResourceManager","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformResourceManager","azuresentinel","azure-sentinel-solution-gcp-rm","2025-03-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPResourceManagerLogsCCFDefinition","Microsoft","Google Cloud Platform Resource Manager (via Codeless Connector Framework)","The Google Cloud Platform Resource Manager data connector provides the capability to ingest Resource Manager [Admin Activity and Data Access Audit logs](https://cloud.google.com/resource-manager/docs/audit-logging) into Microsoft Sentinel using the 
Cloud Resource Manager API. Refer the [Product overview](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy) document for more details.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleCloudPlatformResourceManager/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPResourceManagerLogsSetup/GCPResourceManagerLogSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleCloudPlatformResourceManager/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/c1cb589dad1add228f78e629073a9b069ce52991/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPResourceManagerLogsSetup/GCPResourceManagerLogSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Resource Manager logs \n In the Google Cloud Console, enable cloud resource manager API if not enabled previously, and save the changes. Make sure to have organization level IAM permissions for your account to see all logs in the resource hierarchy. You can refer the document links for different IAM permissions for access control with IAM at each level provided in this [link](https://cloud.google.com/resource-manager/docs/how-to)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCP Resource Manager Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP Resource Manager to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformResourceManager/Data%20Connectors/GCPResourceManagerAuditLogs_ccf/GCPResourceManagerAuditLogs_ConnectorDefinition.json","true" +"GCPCloudSQL","GoogleCloudPlatformSQL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL","azuresentinel","azure-sentinel-solution-gcpsql","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPCloudSQLCCFDefinition","Microsoft","GCP Cloud SQL (via Codeless Connector Framework)","The GCP Cloud SQL data connector provides the capability to ingest Audit logs into Microsoft Sentinel using the GCP Cloud SQL API. Refer to [GCP cloud SQL Audit Logs](https://cloud.google.com/sql/docs/mysql/audit-logging) documentation for more information.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL/Data%20Connectors/Readme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudSQLLogsSetup/GCPCloudSQLLogsSetup.tf)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL/Data%20Connectors/Readme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudSQLLogsSetup/GCPCloudSQLLogsSetup.tf)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", 
""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. In the Google Cloud Console, enable Cloud SQL API, if not enabled previously, and save the changes.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable GCP Cloud SQL Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP Cloud SQL to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL/Data%20Connectors/GCPCloudSQLLog_CCF/GCPCloudSQLLog_ConnectorDefinition.json","true" +"","GoogleDirectory","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleDirectory","","","","","","","","","","","","","","","","","","false" +"GoogleWorkspaceReports","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceCCPDefinition","Microsoft","Google Workspace Activities (via Codeless Connector Framework)","The [Google Workspace](https://workspace.google.com/) Activities data connector provides the capability to ingest Activity Events from [Google Workspace API](https://developers.google.com/admin-sdk/reports/reference/rest/v1/activities/list) into Microsoft Sentinel.","[{""instructions"": [{""type"": ""Markdown"", 
""parameters"": {""content"": ""#### Configuration steps for the Google Reports API\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add https://portal.azure.com/TokenAuthorize/ExtensionName/Microsoft_Azure_Security_Insights as the Authorized redirect URIs.\n\t 4. Once you click Create, you will be provided with the Client ID and Client Secret. \n\tCopy these values and use them in the configuration steps below.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Configure steps for the Google Reports API oauth access. Then, provide the required information below and click on Connect.\n>""}}, {""description"": ""Configure steps for the Google Reports API oauth access. 
Then, provide the required information below and click on Connect.\n>"", ""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""name"", ""columnName"": ""Name""}, {""columnValue"": ""id"", ""columnName"": ""ID""}]}}], ""title"": ""Connect to Google Workspace to start collecting user activity logs into Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Google Workspace API access"", ""description"": ""Access to the Google Workspace activities API through Oauth are required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GoogleWorkspaceTemplate_ccp/GoogleWorkspaceReports_DataConnectorDefinition.json","true" +"GWorkspace_ReportsAPI_access_transparency_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. 
The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_admin_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_calendar_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_chat_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_chrome_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_context_aware_access_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_data_studio_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_drive_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_gcp_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_gplus_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_groups_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_groups_enterprise_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_jamboard_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_keep_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_login_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_meet_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_mobile_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_rules_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_saml_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_token_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_user_accounts_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GoogleWorkspaceReports_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"ThreatIntelligenceIndicator","GreyNoiseThreatIntelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GreyNoiseThreatIntelligence","greynoiseintelligenceinc1681236078693","microsoft-sentinel-byol-greynoise","2023-09-05","2025-07-28","","GreyNoise","Partner","https://www.greynoise.io/contact/general","","domains","GreyNoise2SentinelAPI","GreyNoise, Inc. and BlueCycle LLC","GreyNoise Threat Intelligence","This Data Connector installs an Azure Function app to download GreyNoise indicators once per day and inserts them into the ThreatIntelligenceIndicator table in Microsoft Sentinel.","[{""title"": ""You can connect GreyNoise Threat Intelligence to Microsoft Sentinel by following the below steps: "", ""description"": ""\n> The following steps create an Azure AAD application, retrieves a GreyNoise API key, and saves the values in an Azure Function App Configuration.""}, {""title"": ""1. Retrieve your API Key from GreyNoise Visualizer."", ""description"": ""Generate an API key from GreyNoise Visualizer https://docs.greynoise.io/docs/using-the-greynoise-api""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID and Client ID. 
Also, get the Log Analytics Workspace ID associated with your Microsoft Sentinel instance (it should display below)."", ""description"": ""Follow the instructions here to create your Azure AAD app and save your Client ID and Tenant ID: https://learn.microsoft.com/en-us/azure/sentinel/connect-threat-intelligence-upload-api#instructions\n NOTE: Wait until step 5 to generate your client secret."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Follow the instructions here to add the Microsoft Sentinel Contributor Role: https://learn.microsoft.com/en-us/azure/sentinel/connect-threat-intelligence-upload-api#assign-a-role-to-the-application""}, {""title"": ""4. Specify the AAD permissions to enable MS Graph API access to the upload-indicators API."", ""description"": ""Follow this section here to add **'ThreatIndicators.ReadWrite.OwnedBy'** permission to the AAD App: https://learn.microsoft.com/en-us/azure/sentinel/connect-threat-intelligence-tip#specify-the-permissions-required-by-the-application. \n Back in your AAD App, ensure you grant admin consent for the permissions you just added. \n Finally, in the 'Tokens and APIs' section, generate a client secret and save it. You will need it in Step 6. ""}, {""title"": ""5. Deploy the Threat Intelligence (Preview) Solution, which includes the Threat Intelligence Upload Indicators API (Preview)"", ""description"": ""See Microsoft Sentinel Content Hub for this Solution, and install it in the Microsoft Sentinel instance.""}, {""title"": ""6. Deploy the Azure Function"", ""description"": ""Click the Deploy to Azure button.\n\n [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GreyNoise-azuredeploy)\n\n Fill in the appropriate values for each parameter. 
**Be aware** that the only valid values for the **GREYNOISE_CLASSIFICATIONS** parameter are **benign**, **malicious** and/or **unknown**, which must be comma-separated.""}, {""title"": ""7. Send indicators to Sentinel"", ""description"": ""The function app installed in Step 6 queries the GreyNoise GNQL API once per day, and submits each indicator found in STIX 2.1 format to the [Microsoft Upload Threat Intelligence Indicators API](https://learn.microsoft.com/en-us/azure/sentinel/upload-indicators-api). \n Each indicator expires in ~24 hours from creation unless found on the next day's query. In this case the TI Indicator's **Valid Until** time is extended for another 24 hours, which keeps it active in Microsoft Sentinel. \n\n For more information on the GreyNoise API and the GreyNoise Query Language (GNQL), [click here](https://developer.greynoise.io/docs/using-the-greynoise-api).""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""GreyNoise API Key"", ""description"": ""Retrieve your GreyNoise API Key [here](https://viz.greynoise.io/account/api-key).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GreyNoiseThreatIntelligence/Data%20Connectors/GreyNoiseConnector_UploadIndicatorsAPI.json","true" +"","Group-IB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Group-IB","","","","","","","","","","","","","","","","","","false" +"","HIPAA Compliance","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HIPAA%20Compliance","azuresentinel","azure-sentinel-solution-hipaacompliance","2025-10-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","HYAS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HYAS","hyas","a-hyas-insight-azure-sentinel-solutions-gallery","2021-10-20","","","HYAS","Partner","https://www.hyas.com/contact","","domains","","","","","","","","false" +"HYASProtectDnsSecurityLogs_CL","HYAS Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HYAS%20Protect","hyas","microsoft-sentinel-solution-hyas-protect","2023-09-26","","","HYAS","Partner","https://www.hyas.com/contact","","domains","HYASProtect","HYAS","HYAS Protect","HYAS Protect provide logs based on reputation values - Blocked, Malicious, Permitted, Suspicious.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the HYAS API to pull Logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the HYAS Protect data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-HYASProtect-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Function Name**, **Table Name**, **Workspace ID**, **Workspace Key**, **API Key**, **TimeInterval**, **FetchBlockedDomains**, **FetchMaliciousDomains**, **FetchSuspiciousDomains**, **FetchPermittedDomains** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the HYAS Protect Logs data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-HYASProtect-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. HyasProtectLogsXXX).\n\n\te. **Select a runtime:** Choose Python 3.8.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAPIKey\n\t\tPolling\n\t\tWorkspaceID\n\t\tWorkspaceKey\n. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**HYAS API Key** is required for making API calls.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HYAS%20Protect/Data%20Connectors/HYASProtect_FunctionApp.json","true" +"net_assets_CL","HolmSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity","holmsecurityswedenab1639511288603","holmsecurity_sc_sentinel","2022-07-18","","","Holm Security","Partner","https://support.holmsecurity.com/","","domains","HolmSecurityAssets","Holm Security","Holm Security Asset Data","The connector provides the capability to poll data from Holm Security Center into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Holm Security Assets to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Holm Security API**\n\n [Follow these instructions](https://support.holmsecurity.com/knowledge/how-do-i-set-up-an-api-token) to create an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Holm Security connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Holm Security API authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Holm Security connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-holmsecurityassets-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, 'and/or Other required fields'. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. 
\n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Holm Security API Token"", ""description"": ""Holm Security API Token is required. [Holm Security API Token](https://support.holmsecurity.com/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity/Data%20Connectors/HolmSecurityAssets_API_FunctionApp.json","true" +"web_assets_CL","HolmSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity","holmsecurityswedenab1639511288603","holmsecurity_sc_sentinel","2022-07-18","","","Holm Security","Partner","https://support.holmsecurity.com/","","domains","HolmSecurityAssets","Holm Security","Holm Security Asset Data","The connector provides the capability to poll data from Holm Security Center into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Holm Security Assets to pull its logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Holm Security API**\n\n [Follow these instructions](https://support.holmsecurity.com/knowledge/how-do-i-set-up-an-api-token) to create an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Holm Security connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Holm Security API authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Holm Security connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-holmsecurityassets-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, 'and/or Other required fields'. 
\n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Holm Security API Token"", ""description"": ""Holm Security API Token is required. 
[Holm Security API Token](https://support.holmsecurity.com/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity/Data%20Connectors/HolmSecurityAssets_API_FunctionApp.json","true" +"","HoneyTokens","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HoneyTokens","","","","","","","","","","","","","","","","","","false" +"CyberpionActionItems_CL","IONIX","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IONIX","cyberpion1597832716616","cyberpion_mss","2022-05-02","","","IONIX","Partner","https://www.ionix.io/contact-us/","","domains","CyberpionSecurityLogs","IONIX","IONIX Security Logs","The IONIX Security Logs data connector, ingests logs from the IONIX system directly into Sentinel. The connector allows users to visualize their data, create alerts and incidents and improve security investigations.","[{""title"": """", ""description"": ""Follow the [instructions](https://www.ionix.io/integrations/azure-sentinel/) to integrate IONIX Security Alerts into Sentinel."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""IONIX Subscription"", ""description"": ""a subscription and account is required for IONIX logs. [One can be acquired here.](https://azuremarketplace.microsoft.com/en/marketplace/apps/cyberpion1597832716616.cyberpion)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IONIX/Data%20Connectors/IONIXSecurityLogs.json","true" +"","IPQualityScore","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPQualityScore","ipqualityscorellc1632794263588","ipqs_1","2021-10-20","","","IPQS Plugins Team","Partner","https://www.ipqualityscore.com/contact-us","","domains","","","","","","","","false" +"Ipinfo_ASN_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoASNDataConnector","IPinfo","IPinfo ASN Data Connector","This IPinfo data connector installs an Azure Function app to download standard_ASN datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. 
Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-ASN-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-ASN-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. 
After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/ASN/IPinfo_ASN_API_AzureFunctionApp.json","true" +"Ipinfo_Abuse_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoAbuseDataConnector","IPinfo","IPinfo Abuse Data Connector","This IPinfo data connector installs an Azure Function app to download standard_abuse datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Abuse-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Abuse-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Abuse/IPinfo_Abuse_API_AzureFunctionApp.json","true" +"Ipinfo_Carrier_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoCarrierDataConnector","IPinfo","IPinfo Carrier Data Connector","This IPinfo data connector installs an Azure Function app to download standard_carrier datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Carrier-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Carrier-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Carrier/IPinfo_Carrier_API_AzureFunctionApp.json","true" +"Ipinfo_Company_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoCompanyDataConnector","IPinfo","IPinfo Company Data Connector","This IPinfo data connector installs an Azure Function app to download standard_company datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Company-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Company-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Company/IPinfo_Company_API_AzureFunctionApp.json","true" +"Ipinfo_Country_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoCountryDataConnector","IPinfo","IPinfo Country ASN Data Connector","This IPinfo data connector installs an Azure Function app to download country_asn datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Country-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Country-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Country%20ASN/IPinfo_Country_API_AzureFunctionApp.json","true" +"Ipinfo_Domain_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoDomainDataConnector","IPinfo","IPinfo Domain Data Connector","This IPinfo data connector installs an Azure Function app to download standard_domain datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Domain-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Domain-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Domain/IPinfo_Domain_API_AzureFunctionApp.json","true" +"Ipinfo_Location_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoIplocationDataConnector","IPinfo","IPinfo Iplocation Data Connector","This IPinfo data connector installs an Azure Function app to download standard_location datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Iplocation-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Iplocation-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Iplocation/IPinfo_Iplocation_API_AzureFunctionApp.json","true" +"Ipinfo_Location_extended_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoIplocationExtendedDataConnector","IPinfo","IPinfo Iplocation Extended Data Connector","This IPinfo data connector installs an Azure Function app to download standard_location_extended datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Iplocation-Extended-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Iplocation-Extended-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Iplocation%20Extended/IPinfo_Iplocation_Extended_API_AzureFunctionApp.json","true" +"Ipinfo_Privacy_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoPrivacyDataConnector","IPinfo","IPinfo Privacy Data Connector","This IPinfo data connector installs an Azure Function app to download standard_privacy datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Privacy-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Privacy-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Privacy/IPinfo_Privacy_API_AzureFunctionApp.json","true" +"Ipinfo_Privacy_extended_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoPrivacyExtendedDataConnector","IPinfo","IPinfo Privacy Extended Data Connector","This IPinfo data connector installs an Azure Function app to download standard_privacy datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Privacy-Extended-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Privacy-Extended-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Privacy%20Extended/IPinfo_Privacy_Extended_API_AzureFunctionApp.json","true" +"Ipinfo_RIRWHOIS_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoRIRWHOISDataConnector","IPinfo","IPinfo RIRWHOIS Data Connector","This IPinfo data connector installs an Azure Function app to download RIRWHOIS datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-RIRWHOIS-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-RIRWHOIS-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/RIRWHOIS/IPinfo_RIRWHOIS_API_AzureFunctionApp.json","true" +"Ipinfo_RWHOIS_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoRWHOISDataConnector","IPinfo","IPinfo RWHOIS Data Connector","This IPinfo data connector installs an Azure Function app to download RWHOIS datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-RWHOIS-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-RWHOIS-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/RWHOIS/IPinfo_RWHOIS_API_AzureFunctionApp.json","true" +"Ipinfo_WHOIS_ASN_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISASNDataConnector","IPinfo","IPinfo WHOIS ASN Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_ASN datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-ASN-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-ASN-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20ASN/IPinfo_WHOIS_ASN_API_AzureFunctionApp.json","true" +"Ipinfo_WHOIS_MNT_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISMNTDataConnector","IPinfo","IPinfo WHOIS MNT Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_MNT datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-MNT-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-MNT-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20MNT/IPinfo_WHOIS_MNT_API_AzureFunctionApp.json","true" +"Ipinfo_WHOIS_NET_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISNETDataConnector","IPinfo","IPinfo WHOIS NET Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_NET datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-NET-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-NET-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20NET/IPinfo_WHOIS_NET_API_AzureFunctionApp.json","true" +"Ipinfo_WHOIS_ORG_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISORGDataConnector","IPinfo","IPinfo WHOIS ORG Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_ORG datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-ORG-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-ORG-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20ORG/IPinfo_WHOIS_ORG_API_AzureFunctionApp.json","true" +"Ipinfo_WHOIS_POC_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISPOCDataConnector","IPinfo","IPinfo WHOIS POC Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_POC datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-POC-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-POC-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20POC/IPinfo_WHOIS_POC_API_AzureFunctionApp.json","true" +"Syslog","ISC Bind","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ISC%20Bind","azuresentinel","azure-sentinel-solution-iscbind","2022-09-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ISCBind","ISC","[Deprecated] ISC Bind","The [ISC Bind](https://www.isc.org/bind/) connector allows you to easily connect your ISC Bind logs with Microsoft Sentinel. This gives you more insight into your organization's network traffic data, DNS query data, traffic statistics and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ISCBind and load the function code or click [here](https://aka.ms/sentinel-iscbind-parser).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the ISC Bind"", ""description"": ""1. Follow these instructions to configure the ISC Bind to forward syslog: \n - [DNS Logs](https://kb.isc.org/docs/aa-01526) \n2. Configure Syslog to send the Syslog traffic to Agent. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""ISC Bind"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ISC%20Bind/Data%20Connectors/Connector_Syslog_ISCBind.json","true" +"CommonSecurityLog","Illumio Core","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core","azuresentinel","azure-sentinel-solution-illumiocore","2022-05-26","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","IllumioCore","Illumio","[Deprecated] Illumio Core via Legacy Agent","The [Illumio Core](https://www.illumio.com/products/) data connector provides the capability to ingest Illumio Core logs into Microsoft Sentinel.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias IllumioCoreEvent and load the function code or click [here](https://aka.ms/sentinel-IllumioCore-parser).The function usually takes 10-15 minutes to activate after solution installation/update and maps Illumio Core events to Microsoft Sentinel Information Model (ASIM).""}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure Ilumio Core to send logs using CEF"", ""description"": ""2.1 Configure Event Format\n\n 1. From the PCE web console menu, choose **Settings > Event Settings** to view your current settings.\n\n 2. Click **Edit** to change the settings.\n\n 3. Set **Event Format** to CEF.\n\n 4. (Optional) Configure **Event Severity** and **Retention Period**.\n\n2.2 Configure event forwarding to an external syslog server\n\n 1. From the PCE web console menu, choose **Settings > Event Settings**.\n\n 2. Click **Add**.\n\n 3. 
Click **Add Repository**.\n\n 4. Complete the **Add Repository** dialog.\n\n 5. Click **OK** to save the event forwarding configuration.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core/Data%20Connectors/Connector_IllumioCore_CEF.json","true" +"CommonSecurityLog","Illumio Core","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core","azuresentinel","azure-sentinel-solution-illumiocore","2022-05-26","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","IllumioCoreAma","Illumio","[Deprecated] Illumio Core via AMA","The [Illumio Core](https://www.illumio.com/products/) data connector provides the capability to ingest Illumio Core logs into Microsoft Sentinel.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias IllumioCoreEvent and load the function code or click [here](https://aka.ms/sentinel-IllumioCore-parser).The function usually takes 10-15 minutes to activate after solution installation/update and maps Illumio Core events to Microsoft Sentinel Information Model (ASIM)."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine.""}, {""title"": ""Step B. Configure Ilumio Core to send logs using CEF"", ""description"": ""Configure Event Format\n\n 1. From the PCE web console menu, choose **Settings > Event Settings** to view your current settings.\n\n 2. Click **Edit** to change the settings.\n\n 3. Set **Event Format** to CEF.\n\n 4. (Optional) Configure **Event Severity** and **Retention Period**.\n\nConfigure event forwarding to an external syslog server\n\n 1. From the PCE web console menu, choose **Settings > Event Settings**.\n\n 2. Click **Add**.\n\n 3. Click **Add Repository**.\n\n 4. Complete the **Add Repository** dialog.\n\n 5. Click **OK** to save the event forwarding configuration.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core/Data%20Connectors/template_IllumioCoreAMA.json","true" +"IllumioInsights_CL","Illumio Insight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight","illumioinc1629822633689","azure-sentinel-solution-illumioinsight","2025-08-10","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioInsightsDefinition","Microsoft","Illumio Insights","Illumio Insights Connector sends workload and security graph data from Illumio Insights into the Azure Microsoft Sentinel Data Lake, providing deep context for threat detection, lateral movement analysis, and real-time investigation.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Illumio Insights Connector\n\n**Prerequisites**\n- Register and Login to Illumio Console with valid credentials\n- Purchase Illumio Insights or Start a free Trial for Illumio Insights\n\n**Step 1: Register the Service Account**\n1. Go to **Illumio Console \u2192 Access \u2192 Service Accounts**\n2. Create a service account for the tenant\n3. Once you create a service account, you will receive the client credentials\n4. 
Copy the **auth_username** (Illumio Insights API Key) and the **Secret** (API Secret)\n\n**Step 2: Add Client Credentials to Sentinel Account**\n- Add the API key and secret to Sentinel Account for tenant authentication\n- These credentials will be used to authenticate calls to the Illumio SaaS API\n\nPlease fill in the required fields below with the credentials obtained from the Illumio Console:""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Illumio Insights Api Key"", ""placeholder"": ""api_XXXXXX"", ""type"": ""password"", ""name"": ""apiKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Api Secret"", ""placeholder"": ""API Secret"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""Illumio Tenant Id"", ""placeholder"": ""{IllumioTenantId - Optional}"", ""type"": ""text"", ""name"": ""illumioTenantId""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight/Data%20Connectors/IllumioInsight_CCP/IllumioInsight_Definition.json","true" +"IllumioInsightsSummary_CL","Illumio Insight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight","illumioinc1629822633689","azure-sentinel-solution-illumioinsight","2025-08-10","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioInsightsSummaryCCP","Illumio","Illumio Insights Summary","The Illumio Insights Summary connector Publishes AI-powered threat discovery and anomaly reports generated by the Illumio Insights Agent. 
Leveraging the MITRE ATT&CK framework, these reports surface high-fidelity insights into emerging threats and risky behaviors, directly into the Data Lake.","[{""title"": ""1. Configuration"", ""description"": ""Configure the Illumio Insights Summary connector."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""> This data connector may take 24 hrs for the latest report after onboarding""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Illumio Insights Summary Connector\n\n**Prerequisites**\n- Register and Login to Illumio Console with valid credentials\n- Purchase Illumio Insights or Start a free Trial for Illumio Insights\n- Enable The Illumio Insights Agent\n\n**Step 1: Register the Service Account**\n1. Go to **Illumio Console \u2192 Access \u2192 Service Accounts**\n2. Create a service account for the tenant\n3. Once you create a service account, you will receive the client credentials\n4. Copy the **auth_username** (Illumio Insights API Key) and the **Secret** (API Secret)\n\n**Step 2: Add Client Credentials to Sentinel Account**\n- Add the API key and secret to Sentinel Account for tenant authentication\n- These credentials will be used to authenticate calls to the Illumio SaaS API \n\nPlease fill in the required fields below with the credentials obtained from the Illumio Console:""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Illumio Insights Api Key"", ""placeholder"": ""api_XXXXXX"", ""type"": ""password"", ""name"": ""apiKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Api Secret"", ""placeholder"": ""API Secret"", ""type"": ""password"", ""name"": ""apiToken""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Illumio Tenant ID"", ""placeholder"": ""{IllumioTenantId - Optional}"", ""type"": ""text"", ""name"": ""illumioTenantId""}}]}, {""title"": ""2. 
Connect"", ""description"": ""Enable the Illumio Insights Summary connector."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight/Data%20Connectors/IllumioInsightsSummaryConnector_CCP/IllumioInsightsSummary_ConnectorDefinition.json","true" +"Illumio_Auditable_Events_CL","IllumioSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS","illumioinc1629822633689","illumio_sentinel","2024-05-13","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioSaaSDataConnector","Illumio","Illumio SaaS","[Illumio](https://www.illumio.com/) connector provides the capability to ingest events into Microsoft Sentinel. The connector provides ability to ingest auditable and flow events from AWS S3 bucket.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. 
Ensure AWS SQS is configured for the s3 bucket from which flow and auditable event logs are going to be pulled. In case, Illumio provides bucket, please contact Illumio support for sqs url, s3 bucket name and aws credentials. \n 2. Register AAD application - For DCR (Data collection rule) to authentiate to ingest data into log analytics, you must use Entra application. 1. [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. \n 2. Ensure you have created a log analytics workspace. \nPlease keep note of the name and region where it has been deployed.""}, {""title"": ""Deployment"", ""description"": ""Choose one of the approaches from below options. Either use the below ARM template to deploy azure resources or deploy function app manually.""}, {""title"": ""1. Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of Azure resources using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IllumioSaaS-FunctionApp) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""2. Deploy additional function apps to handle scale"", ""description"": ""Use this method for automated deployment of additional function apps using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IllumioSaaS-QueueTriggerFunctionApp) \t\t\t\n""}, {""title"": ""3. Manual Deployment of Azure Functions"", ""description"": ""Deployment via Visual Studio Code.""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/raw/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioEventsConn.zip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. Follow documentation to set up all required environment variables and click **Save**. Ensure you restart the function app once settings are saved.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](). If you are using s3 bucket provided by Illumio, contact Illumio support. At your request they will provide you with the AWS S3 bucket name, AWS SQS url and AWS credentials to access them.""}, {""name"": ""Illumio API key and secret"", ""description"": ""**ILLUMIO_API_KEY**, **ILLUMIO_API_SECRET** is required for a workbook to make connection to SaaS PCE and fetch api responses.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaaS_FunctionApp.json","true" +"Illumio_Flow_Events_CL","IllumioSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS","illumioinc1629822633689","illumio_sentinel","2024-05-13","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioSaaSDataConnector","Illumio","Illumio SaaS","[Illumio](https://www.illumio.com/) connector provides the capability to ingest events into Microsoft Sentinel. The connector provides ability to ingest auditable and flow events from AWS S3 bucket.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Ensure AWS SQS is configured for the s3 bucket from which flow and auditable event logs are going to be pulled. In case, Illumio provides bucket, please contact Illumio support for sqs url, s3 bucket name and aws credentials. \n 2. Register AAD application - For DCR (Data collection rule) to authentiate to ingest data into log analytics, you must use Entra application. 1. [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. \n 2. Ensure you have created a log analytics workspace. \nPlease keep note of the name and region where it has been deployed.""}, {""title"": ""Deployment"", ""description"": ""Choose one of the approaches from below options. Either use the below ARM template to deploy azure resources or deploy function app manually.""}, {""title"": ""1. Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of Azure resources using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IllumioSaaS-FunctionApp) \t\t\t\n2. 
Provide the required details such as Microsoft Sentinel Workspace, AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""2. Deploy additional function apps to handle scale"", ""description"": ""Use this method for automated deployment of additional function apps using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IllumioSaaS-QueueTriggerFunctionApp) \t\t\t\n""}, {""title"": ""3. Manual Deployment of Azure Functions"", ""description"": ""Deployment via Visual Studio Code.""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/raw/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioEventsConn.zip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. Follow documentation to set up all required environment variables and click **Save**. 
Ensure you restart the function app once settings are saved.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](). If you are using s3 bucket provided by Illumio, contact Illumio support. 
At your request they will provide you with the AWS S3 bucket name, AWS SQS url and AWS credentials to access them.""}, {""name"": ""Illumio API key and secret"", ""description"": ""**ILLUMIO_API_KEY**, **ILLUMIO_API_SECRET** is required for a workbook to make connection to SaaS PCE and fetch api responses.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaaS_FunctionApp.json","true" +"IllumioFlowEventsV2_CL","IllumioSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS","illumioinc1629822633689","illumio_sentinel","2024-05-13","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioSaasCCFDefinition","Microsoft","Illumio Saas","The Illumio Saas Cloud data connector provides the capability to ingest Flow logs into Microsoft Sentinel using the Illumio Saas Log Integration through AWS S3 Bucket. Refer to [Illumio Saas Log Integration](https://product-docs-repo.illumio.com/Tech-Docs/CloudSecure/out/en/administer-cloudsecure/connector.html#UUID-c14edaab-9726-1f23-9c4c-bc2937be39ee_section-idm234556433515698) for more information.","[{""title"": ""Connect Illumio Saas to Microsoft Sentinel\n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** This connector fetches the Illumio Saas Flow logs from AWS S3 bucket""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Illumio, you need to configure the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS Role ARN \n To gather data from Illumio, you'll need AWS Role ARN.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
AWS SQS Queue URL \n To gather data from Illumio, you'll need AWS SQS Queue URL.\n\n""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed steps to retrieve the AWS Role ARN, SQS Queue URL, and configure Illumio log forwarding to the Amazon S3 bucket, refer to the [Connector Setup Guide](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaasLogs_ccf/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""AWS Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""AWS SQS Queue URL""}, {""columnValue"": ""properties.destinationTable"", ""columnName"": ""Table Name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""placeholder"": ""Enter Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Flow Log Queue URL"", ""placeholder"": ""Enter Flow log SQL Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""required"": true}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaasLogs_ccf/IllumioSaasLogs_ConnectorDefinition.json","true" +"","Illusive Active 
Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Active%20Defense","","","","","","","","","","","","","","","","","","false" +"CommonSecurityLog","Illusive Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform","illusivenetworks","illusive_platform_mss","2022-05-25","","","Illusive Networks","Partner","https://illusive.com/support","","domains","illusiveAttackManagementSystem","illusive","[Deprecated] Illusive Platform via Legacy Agent","The Illusive Platform Connector allows you to share Illusive's attack surface analysis data and incident logs with Microsoft Sentinel and view this information in dedicated dashboards that offer insight into your organization's attack surface risk (ASM Dashboard) and track unauthorized lateral movement in your organization's network (ADS Dashboard).","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Illusive Common Event Format (CEF) logs to Syslog agent"", ""description"": ""1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.\n> 2. Log onto the Illusive Console, and navigate to Settings->Reporting.\n> 3. Find Syslog Servers\n> 4. Supply the following information:\n>> 1. Host name: Linux Syslog agent IP address or FQDN host name\n>> 2. Port: 514\n>> 3. Protocol: TCP\n>> 4. Audit messages: Send audit messages to server\n> 5. To add the syslog server, click Add.\n> 6. For more information about how to add a new syslog server in the Illusive platform, please find the Illusive Networks Admin Guide in here: https://support.illusivenetworks.com/hc/en-us/sections/360002292119-Documentation-by-Version""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform/Data%20Connectors/illusive%20Attack%20Management%20System.json","true" +"CommonSecurityLog","Illusive Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform","illusivenetworks","illusive_platform_mss","2022-05-25","","","Illusive Networks","Partner","https://illusive.com/support","","domains","illusiveAttackManagementSystemAma","illusive","[Deprecated] Illusive Platform via AMA","The Illusive Platform Connector allows you to share Illusive's attack surface analysis data and incident logs with Microsoft Sentinel and view this information in dedicated dashboards that offer insight into your organization's attack surface risk (ASM Dashboard) and track unauthorized lateral movement in your organization's network (ADS Dashboard).","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Illusive Common Event Format (CEF) logs to Syslog agent"", ""description"": ""1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.\n> 2. Log onto the Illusive Console, and navigate to Settings->Reporting.\n> 3. Find Syslog Servers\n> 4. Supply the following information:\n>> 1. Host name: Linux Syslog agent IP address or FQDN host name\n>> 2. Port: 514\n>> 3. Protocol: TCP\n>> 4. Audit messages: Send audit messages to server\n> 5. To add the syslog server, click Add.\n> 6. For more information about how to add a new syslog server in the Illusive platform, please find the Illusive Networks Admin Guide in here: https://support.illusivenetworks.com/hc/en-us/sections/360002292119-Documentation-by-Version""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform/Data%20Connectors/template_IllusivePlatformAMA.json","true" +"","Images","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Images","","","","","","","","","","","","","","","","","","false" +"CommonSecurityLog","Imperva WAF Gateway","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Imperva%20WAF%20Gateway","imperva","Imperva_WAF_Gateway_MSS","2022-05-02","","","Imperva","Partner","https://www.imperva.com/support/technical-support/","","domains","ImpervaWAFGateway","Imperva","Imperva WAF Gateway","The [Imperva](https://www.imperva.com) connector will allow you to quickly connect your Imperva WAF Gateway alerts to Azure Sentinel. This provides you additional insight into your organization's WAF traffic and improves your security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Azure Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Azure Sentinel will use as the proxy between your security solution and Azure Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Azure Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. SecureSphere MX Configuration"", ""description"": ""This connector requires an Action Interface and Action Set to be created on the Imperva SecureSphere MX. 
[Follow the steps](https://community.imperva.com/blogs/craig-burlingame1/2020/11/13/steps-for-enabling-imperva-waf-gateway-alert) to create the requirements."", ""innerSteps"": [{""title"": ""3.1 Create the Action Interface"", ""description"": ""Create a new Action Interface that contains the required parameters to send WAF alerts to Azure Sentinel.""}, {""title"": ""3.2 Create the Action Set "", ""description"": ""Create a new Action Set that uses the Action Interface configured.""}, {""title"": ""3.3 Apply the Action Set"", ""description"": ""Apply the Action Set to any Security Policies you wish to have alerts for sent to Azure Sentinel.""}]}, {""title"": ""4. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n"", ""innerSteps"": [{""title"": ""4.1 Check for logs in the past 5 minutes using the following command.\n\nCommonSecurityLog | where DeviceVendor == \""Imperva Inc.\"" | where DeviceProduct == \""WAF Gateway\"" | where TimeGenerated == ago(5m)""}]}, {""title"": ""5. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Imperva%20WAF%20Gateway/Data%20Connectors/Connector_Imperva_WAF_Gateway.json","true" +"ImpervaWAFCloudV2_CL","ImpervaCloudWAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF","azuresentinel","azure-sentinel-solution-impervawafcloud","2021-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ImpervaCloudWAFLogsCCFDefinition","Microsoft","Imperva Cloud WAF","The Imperva WAF Cloud data connector provides the capability to ingest logs into Microsoft Sentinel using the Imperva Log Integration through AWS S3 Bucket. Refer to [Imperva WAF Cloud Log Integration](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm) for more information.","[{""title"": ""Connect Imperva WAF Cloud to Microsoft Sentinel\n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** This connector fetches the Imperva Cloud WAF logs from AWS S3 bucket""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Imperva, you need to configure the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS Role ARN \n To gather data from Imperva, you'll need AWS Role ARN.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
AWS SQS Queue URL \n To gather data from Imperva, you'll need AWS SQS Queue URL.\n\n""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed steps to retrieve the AWS Role ARN, SQS Queue URL, and configure Imperva log forwarding to the Amazon S3 bucket, refer to the [Connector Setup Guide](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""AWS Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""AWS SQS Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""placeholder"": ""Enter Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""placeholder"": ""Enter SQL Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""required"": true}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF/Data%20Connectors/ImpervaCloudWAFLogs_ccf/ImpervaCloudWAFLogs_ConnectorDefinition.json","true" +"ImpervaWAFCloud_CL","ImpervaCloudWAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF","azuresentinel","azure-sentinel-solution-impervawafcloud","2021-09-28","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","ImpervaWAFCloudAPI","Imperva","Imperva Cloud WAF","The [Imperva Cloud WAF](https://www.imperva.com/resources/resource-library/datasheets/imperva-cloud-waf/) data connector provides the capability to integrate and ingest Web Application Firewall events into Microsoft Sentinel through the REST API. Refer to Log integration [documentation](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Download) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Imperva Cloud API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""description"": "">**NOTE:**This data connector depends on a parser based on a Kusto Function to work as expected [**ImpervaWAFCloud**](https://aka.ms/sentinel-impervawafcloud-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Log Integration**\n\n [Follow the instructions](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration) to obtain the credentials. 
\n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Imperva Cloud WAF data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-impervawafcloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **ImpervaAPIID**, **ImpervaAPIKey**, **ImpervaLogServerURI** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Imperva Cloud WAF data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-impervawafcloud-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ImpervaCloudXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tImpervaAPIID\n\t\tImpervaAPIKey\n\t\tImpervaLogServerURI\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**ImpervaAPIID**, **ImpervaAPIKey**, **ImpervaLogServerURI** are required for the API. [See the documentation to learn more about Setup Log Integration process](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration). 
Check all [requirements and follow the instructions](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration) for obtaining credentials. Please note that this connector uses CEF log event format. [More information](https://docs.imperva.com/bundle/cloud-application-security/page/more/log-file-structure.htm#Logfilestructure) about log format.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF/Data%20Connectors/ImpervaWAFCloud_FunctionApp.json","true" +"Failed_Range_To_Ingest_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. 
This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"Infoblox_Failed_Indicators_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_atp_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_atp_threat_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_dns_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_geo_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_infoblox_web_cat_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_inforank_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_malware_analysis_v3_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_nameserver_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_nameserver_matches_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_ptr_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_rpz_feeds_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_rpz_feeds_records_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_threat_actor_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_tld_risk_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_whitelist_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_whois_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"CommonSecurityLog","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_AMA","Infoblox","[Recommended] Infoblox SOC Insight Data Connector via AMA","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the new Azure Monitor Agent. Learn more about ingesting using the new Azure Monitor Agent [here](https://learn.microsoft.com/azure/sentinel/connect-cef-ama). **Microsoft recommends using this Data Connector.**","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights).""}, {""title"": ""Infoblox Cloud Data Connector"", ""description"": "">This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. 
See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": [{""parameters"": {""title"": ""Follow the steps below to configure this data connector"", ""instructionSteps"": [{""title"": ""A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note: CEF logs are collected only from Linux Agents_\n\n1. Navigate to your **Microsoft Sentinel workspace > Data connectors** blade.\n\n2. Search for the **Common Event Format (CEF) via AMA** data connector and open it.\n\n3. Ensure there is no existing DCR configured to collect required facility of logs as it may cause log duplication. Create a new **DCR (Data Collection Rule)**.\n\n\t_Note: It is recommended to install the AMA agent v1.27 at minimum. [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication._\n\n4. Run the command provided in the **Common Event Format (CEF) via AMA** data connector page to configure the CEF collector on the machine.""}, {""title"": ""B. Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. 
\n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select the **Internal Notifications** Log Type.\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed. 
[Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxSOCInsights/InfobloxSOCInsightsDataConnector_AMA.json","true" +"InfobloxInsight_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_API","Infoblox","Infoblox SOC Insight Data Connector via REST API","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxInsight**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxInsight.yaml) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. 
You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights).""}, {""title"": ""Follow the steps below to configure this data connector"", ""description"": """", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Generate an Infoblox API Key and copy it somewhere safe"", ""description"": ""In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": ""2. Configure the Infoblox-SOC-Get-Open-Insights-API playbook"", ""description"": ""Create and configure the **Infoblox-SOC-Get-Open-Insights-API** playbook which is deployed with this solution. Enter your Infoblox API key in the appropriate parameter when prompted.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxSOCInsights/InfobloxSOCInsightsDataConnector_API.json","true" +"CommonSecurityLog","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_Legacy","Infoblox","[Deprecated] Infoblox SOC Insight Data Connector via Legacy Agent","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the legacy Log Analytics agent.

**Microsoft recommends installation of Infoblox SOC Insight Data Connector via AMA Connector.** The legacy connector uses the Log Analytics agent which is about to be deprecated by **Aug 31, 2024,** and should only be installed where AMA is not supported.

Using MMA and AMA on the same machine can cause log duplication and extra ingestion cost. [More details](https://learn.microsoft.com/en-us/azure/sentinel/ama-migrate).","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights). "", ""instructions"": []}, {""title"": ""Infoblox Cloud Data Connector"", ""description"": "">This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. 
\n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select the **Internal Notifications** Log Type.\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxSOCInsights/InfobloxSOCInsightsDataConnector_Legacy.json","true" +"CommonSecurityLog","Infoblox Cloud Data Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector","infoblox","infoblox-cdc-solution","2021-10-20","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxCloudDataConnector","Infoblox","[Deprecated] Infoblox Cloud Data Connector via Legacy Agent","The Infoblox Cloud Data Connector allows you to easily connect your Infoblox BloxOne data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**IMPORTANT:** This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC**](https://aka.ms/sentinel-InfobloxCloudDataConnector-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** This Microsoft Sentinel data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. 
See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure Infoblox BloxOne to send Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. 
Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. \n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select all desired **log types** you wish to collect. Currently supported log types are:\n - Threat Defense Query/Response Log\n - Threat Defense Threat Feeds Hits Log\n - DDI Query/Response Log\n - DDI DHCP Lease Log\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. 
Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector/Data%20Connectors/InfobloxCloudDataConnector.json","true" +"CommonSecurityLog","Infoblox Cloud Data Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector","infoblox","infoblox-cdc-solution","2021-10-20","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxCloudDataConnectorAma","Infoblox","[Deprecated] Infoblox Cloud Data Connector via AMA","The Infoblox Cloud Data Connector allows you to easily connect your Infoblox BloxOne data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**IMPORTANT:** This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC**](https://aka.ms/sentinel-InfobloxCloudDataConnector-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** This Microsoft Sentinel data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. 
See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": [{""parameters"": {""title"": ""1. Follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note: CEF logs are collected only from Linux Agents_\n\n1. Navigate to your **Microsoft Sentinel workspace > Data connectors** blade.\n\n2. Search for the **Common Event Format (CEF) via AMA** data connector and open it.\n\n3. Ensure there is no existing DCR configured to collect required facility of logs as it may cause log duplication. Create a new **DCR (Data Collection Rule)**.\n\n\t_Note: It is recommended to install the AMA agent v1.27 at minimum. [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication._\n\n4. Run the command provided in the **CEF via AMA data connector** page to configure the CEF collector on the machine.""}, {""title"": ""Step B. Configure Infoblox BloxOne to send Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. 
\n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select all desired **log types** you wish to collect. Currently supported log types are:\n - Threat Defense Query/Response Log\n - Threat Defense Threat Feeds Hits Log\n - DDI Query/Response Log\n - DDI DHCP Lease Log\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector/Data%20Connectors/template_InfobloxCloudDataConnectorAMA.json","true" +"Syslog","Infoblox NIOS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20NIOS","azuresentinel","azure-sentinel-solution-infobloxnios","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","InfobloxNIOS","Infoblox","[Deprecated] Infoblox NIOS","The [Infoblox Network Identity Operating System (NIOS)](https://www.infoblox.com/glossary/network-identity-operating-system-nios/) connector allows you to easily connect your Infoblox NIOS logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Infoblox and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20NIOS/Parser/Infoblox.yaml), on the second line of the query, enter any unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the Infoblox NIOS"", ""description"": ""[Follow these instructions](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-slog-and-snmp-configuration-for-nios.pdf) to enable syslog forwarding of Infoblox NIOS Logs. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}, {""title"": ""4. Configure the Sentinel parser"", ""description"": ""Update the watchlist 'Sources_by_Source' with the hostname(s) of your Infoblox device(s). 
Set SourceType to 'InfobloxNIOS' and Source to the value of 'Computer' seen in the logs seen in Syslog table.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Infoblox NIOS"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20NIOS/Data%20Connectors/Connector_Syslog_Infoblox.json","true" +"CommonSecurityLog","Infoblox SOC Insights","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights","infoblox","infoblox-soc-insight-solution","2024-03-06","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_AMA","Infoblox","[Deprecated] Infoblox SOC Insight Data Connector via AMA","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the new Azure Monitor Agent. Learn more about ingesting using the new Azure Monitor Agent [here](https://learn.microsoft.com/azure/sentinel/connect-cef-ama). **Microsoft recommends using this Data Connector.**","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights).""}, {""title"": ""Infoblox Cloud Data Connector"", ""description"": "">This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. 
See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": [{""parameters"": {""title"": ""Follow the steps below to configure this data connector"", ""instructionSteps"": [{""title"": ""A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note: CEF logs are collected only from Linux Agents_\n\n1. Navigate to your **Microsoft Sentinel workspace > Data connectors** blade.\n\n2. Search for the **Common Event Format (CEF) via AMA** data connector and open it.\n\n3. Ensure there is no existing DCR configured to collect required facility of logs as it may cause log duplication. Create a new **DCR (Data Collection Rule)**.\n\n\t_Note: It is recommended to install the AMA agent v1.27 at minimum. [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication._\n\n4. Run the command provided in the **Common Event Format (CEF) via AMA** data connector page to configure the CEF collector on the machine.""}, {""title"": ""B. Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. 
\n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select the **Internal Notifications** Log Type.\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed. 
[Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Data%20Connectors/InfobloxSOCInsightsDataConnector_AMA.json","true" +"InfobloxInsight_CL","Infoblox SOC Insights","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights","infoblox","infoblox-soc-insight-solution","2024-03-06","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_API","Infoblox","Infoblox SOC Insight Data Connector via REST API","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxInsight**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxInsight.yaml) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. 
You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights)."", ""instructions"": []}, {""title"": ""Follow the steps below to configure this data connector"", ""description"": """", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Generate an Infoblox API Key and copy it somewhere safe"", ""description"": ""In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F)."", ""instructions"": []}, {""title"": ""2. Configure the Infoblox-SOC-Get-Open-Insights-API playbook"", ""description"": ""Create and configure the **Infoblox-SOC-Get-Open-Insights-API** playbook which is deployed with this solution. Enter your Infoblox API key in the appropriate parameter when prompted."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Data%20Connectors/InfobloxSOCInsightsDataConnector_API.json","true" +"CommonSecurityLog","Infoblox SOC Insights","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights","infoblox","infoblox-soc-insight-solution","2024-03-06","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_Legacy","Infoblox","[Deprecated] Infoblox SOC Insight Data Connector via Legacy Agent","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the legacy Log Analytics agent.

**Microsoft recommends installation of Infoblox SOC Insight Data Connector via AMA Connector.** The legacy connector uses the Log Analytics agent which is about to be deprecated by **Aug 31, 2024,** and should only be installed where AMA is not supported.

Using MMA and AMA on the same machine can cause log duplication and extra ingestion cost. [More details](https://learn.microsoft.com/en-us/azure/sentinel/ama-migrate).","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights). "", ""instructions"": []}, {""title"": ""Infoblox Cloud Data Connector"", ""description"": "">This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. 
\n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select the **Internal Notifications** Log Type.\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Data%20Connectors/InfobloxSOCInsightsDataConnector_Legacy.json","true" +"","InsightVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/InsightVM","","","","","","","","","","","","","","","","","","false" +"atlassian_beacon_alerts_CL","Integration for Atlassian Beacon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Integration%20for%20Atlassian%20Beacon","defendlimited1682894612656","microsoft-sentinel-solution-atlassian-beacon","2023-09-22","","","DEFEND Ltd.","Partner","https://www.defend.co.nz/","","domains","AtlassianBeaconAlerts","DEFEND Ltd.","Atlassian Beacon Alerts","Atlassian Beacon is a cloud product that is built for Intelligent threat detection across the Atlassian platforms (Jira, Confluence, and Atlassian Admin). This can help users detect, investigate and respond to risky user activity for the Atlassian suite of products. The solution is a custom data connector from DEFEND Ltd. that is used to visualize the alerts ingested from Atlassian Beacon to Microsoft Sentinel via a Logic App.","[{""description"": "">1. Navigate to the newly installed Logic App 'Atlassian Beacon Integration'\n\n>2. Navigate to 'Logic app designer'\n\n>3. Expand the 'When a HTTP request is received'\n\n>4. Copy the 'HTTP POST URL'"", ""title"": ""1. Microsoft Sentinel""}, {""description"": "">1. Login to Atlassian Beacon using an admin account\n\n>2. Navigate to 'SIEM forwarding' under SETTINGS\n\n> 3. Paste the copied URL from Logic App in the text box\n\n> 4. Click the 'Save' button"", ""title"": ""2. Atlassian Beacon""}, {""description"": "">1. 
Login to Atlassian Beacon using an admin account\n\n>2. Navigate to 'SIEM forwarding' under SETTINGS\n\n> 3. Click the 'Test' button right next to the newly configured webhook\n\n> 4. Navigate to Microsoft Sentinel\n\n> 5. Navigate to the newly installed Logic App\n\n> 6. Check for the Logic App Run under 'Runs history'\n\n> 7. Check for logs under the table name 'atlassian_beacon_alerts_CL' in 'Logs'\n\n> 8. If the analytic rule has been enabled, the above Test alert should have created an incident in Microsoft Sentinel"", ""title"": ""3. Testing and Validation""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""read and write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}, {""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""providerDisplayName"": ""Keys"", ""requiredPermissions"": {""action"": true}, ""scope"": ""Workspace""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Integration%20for%20Atlassian%20Beacon/Data%20Connectors/AtlassianBeacon_DataConnector.json","true" +"","Intel471","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Intel471","intel471inc1641226539011","microsoft-sentinel-solution-intel471","2023-06-21","","","Intel 471","Partner","https://intel471.com/company/contact","","domains","","","","","","","","false" 
+"","IoTOTThreatMonitoringwithDefenderforIoT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IoTOTThreatMonitoringwithDefenderforIoT","azuresentinel","azure-sentinel-solution-unifiedmicrosoftsocforot","2021-10-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","IronNet IronDefense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IronNet%20IronDefense","ironnetcybersecurity1585849518753","irondefense-for-sentinel","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Island_Admin_CL","Island","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island","islandtechnologyinc1679434413850","island-sentinel-solution","2023-05-02","2023-07-20","","Island","Partner","https://www.island.io","","domains","Island_Admin_Polling","Island","Island Enterprise Browser Admin Audit (Polling CCP)","The [Island](https://www.island.io) Admin connector provides the capability to ingest Island Admin Audit logs into Microsoft Sentinel.","[{""title"": ""Connect Island to Microsoft Sentinel"", ""description"": ""Provide the Island API URL and Key. 
API URL is https://management.island.io/api/external/v1/adminActions for US or https://eu.management.island.io/api/external/v1/adminActions for EU.\n Generate the API Key in the Management Console under Settings > API."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""API URL"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{placeHolder1}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Island API Key"", ""description"": ""An Island API key is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island/Data%20Connectors/IslandAdminAPIConnector.json","true" +"Island_User_CL","Island","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island","islandtechnologyinc1679434413850","island-sentinel-solution","2023-05-02","2023-07-20","","Island","Partner","https://www.island.io","","domains","Island_User_Polling","Island","Island Enterprise Browser User Activity (Polling CCP)","The [Island](https://www.island.io) connector provides the capability to ingest Island User Activity logs into Microsoft Sentinel.","[{""title"": ""Connect Island to Microsoft Sentinel"", ""description"": ""Provide the Island API URL and Key. 
API URL is https://management.island.io/api/external/v1/timeline for US or https://eu.management.island.io/api/external/v1/timeline for EU.\n Generate the API Key in the Management Console under Settings > API."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""API URL"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{placeHolder1}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Island API Key"", ""description"": ""An Island API key is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island/Data%20Connectors/IslandUserAPIConnector.json","true" +"Syslog","Ivanti Unified Endpoint Management","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ivanti%20Unified%20Endpoint%20Management","azuresentinel","azure-sentinel-solution-ivantiuem","2022-07-05","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","IvantiUEM","Ivanti","[Deprecated] Ivanti Unified Endpoint Management","The [Ivanti Unified Endpoint Management](https://www.ivanti.com/products/unified-endpoint-manager) data connector provides the capability to ingest [Ivanti UEM Alerts](https://help.ivanti.com/ld/help/en_US/LDMS/11.0/Windows/alert-c-monitoring-overview.htm) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**IvantiUEMEvent**](https://aka.ms/sentinel-ivantiuem-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", 
""description"": "">**NOTE:** This data connector has been developed using Ivanti Unified Endpoint Management Release 2021.1 Version 11.0.3.374"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the Ivanti Unified Endpoint Management Alerts are forwarded.\n\n> Logs from Ivanti Unified Endpoint Management Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Configure Ivanti Unified Endpoint Management alert forwarding."", ""description"": ""[Follow the instructions](https://help.ivanti.com/ld/help/en_US/LDMS/11.0/Windows/alert-t-define-action.htm) to set up Alert Actions to send logs to syslog server.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ivanti%20Unified%20Endpoint%20Management/Data%20Connectors/Ivanti_UEM_Syslog.json","true" +"JBossLogs_CL","JBoss","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JBoss","azuresentinel","azure-sentinel-solution-jboss","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JBossEAP","Red Hat","[Deprecated] JBoss Enterprise Application Platform","The JBoss Enterprise Application Platform data connector provides the capability to ingest [JBoss](https://www.redhat.com/en/technologies/jboss-middleware/application-platform) events into Microsoft Sentinel. Refer to [Red Hat documentation](https://access.redhat.com/documentation/en-us/red_hat_jboss_enterprise_application_platform/7.0/html/configuration_guide/logging_with_jboss_eap) for more information.","[{""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**JBossEvent**](https://aka.ms/sentinel-jbosseap-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using JBoss Enterprise Application Platform 7.4.0."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the JBoss server where the logs are generated.\n\n> Logs from JBoss Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents.\n "", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""description"": ""1. 
Select the link above to open your workspace advanced settings \n2. Click **+Add custom**\n3. Click **Browse** to upload a sample of a JBoss log file (e.g. server.log). Then, click **Next >**\n4. Select **Timestamp** as the record delimiter and select Timestamp format **YYYY-MM-DD HH:MM:SS** from the dropdown list then click **Next >**\n5. Select **Windows** or **Linux** and enter the path to JBoss logs based on your configuration. Example:\n - **Linux** Directory:\n\n>Standalone server: EAP_HOME/standalone/log/server.log\n\n>Managed domain: EAP_HOME/domain/servers/SERVER_NAME/log/server.log\n\n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **JBossLogs** as the custom log Name and click **Done**""}, {""title"": ""3. Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the JBossLogs_CL Custom log table.\n\n>**NOTE:** It may take up to 30 minutes before new logs will appear in JBossLogs_CL table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JBoss/Data%20Connectors/Connector_JBoss.json","true" +"jamfprotectalerts_CL","Jamf Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect","jamfsoftwareaustraliaptyltd1620360395539","jamf_protect","2022-10-10","2025-09-02","","Jamf Software, LLC","Partner","https://www.jamf.com/support/","","domains","JamfProtectPush","Jamf","Jamf Protect Push Connector","The [Jamf Protect](https://www.jamf.com/products/jamf-protect/) connector provides the capability to read raw event data from Jamf Protect in Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Jamf Protect uses in a Microsoft Analytics Workspace, if the [data forwarding](https://docs.jamf.com/jamf-protect/documentation/Data_Forwarding_to_a_Third_Party_Storage_Solution.html?hl=sentinel#task-4227) option is enabled in Jamf Protect then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. 
This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Jamf Protect connector resources"", ""applicationDisplayName"": ""Jamf Protect Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Unified Logs Stream Name"", ""value"": ""Custom-jamfprotectunifiedlogs""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Telemetry Stream Name"", ""value"": ""Custom-jamfprotecttelemetryv2""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Alerts Stream Name"", ""value"": ""Custom-jamfprotectalerts""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write 
permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect/Data%20Connectors/JamfProtect_ccp/connectorDefinition.json","true" +"jamfprotecttelemetryv2_CL","Jamf Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect","jamfsoftwareaustraliaptyltd1620360395539","jamf_protect","2022-10-10","2025-09-02","","Jamf Software, LLC","Partner","https://www.jamf.com/support/","","domains","JamfProtectPush","Jamf","Jamf Protect Push Connector","The [Jamf Protect](https://www.jamf.com/products/jamf-protect/) connector provides the capability to read raw event data from Jamf Protect in Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Jamf Protect uses in a Microsoft Analytics Workspace, if the [data forwarding](https://docs.jamf.com/jamf-protect/documentation/Data_Forwarding_to_a_Third_Party_Storage_Solution.html?hl=sentinel#task-4227) option is enabled in Jamf Protect then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). 
\nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Jamf Protect connector resources"", ""applicationDisplayName"": ""Jamf Protect Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Unified Logs Stream Name"", ""value"": ""Custom-jamfprotectunifiedlogs""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Telemetry Stream Name"", ""value"": ""Custom-jamfprotecttelemetryv2""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Alerts Stream Name"", ""value"": ""Custom-jamfprotectalerts""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": 
[{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect/Data%20Connectors/JamfProtect_ccp/connectorDefinition.json","true" +"jamfprotectunifiedlogs_CL","Jamf Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect","jamfsoftwareaustraliaptyltd1620360395539","jamf_protect","2022-10-10","2025-09-02","","Jamf Software, LLC","Partner","https://www.jamf.com/support/","","domains","JamfProtectPush","Jamf","Jamf Protect Push Connector","The [Jamf Protect](https://www.jamf.com/products/jamf-protect/) connector provides the capability to read raw event data from Jamf Protect in Microsoft Sentinel.","[{""title"": ""1. 
Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Jamf Protect uses in a Microsoft Analytics Workspace, if the [data forwarding](https://docs.jamf.com/jamf-protect/documentation/Data_Forwarding_to_a_Third_Party_Storage_Solution.html?hl=sentinel#task-4227) option is enabled in Jamf Protect then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Jamf Protect connector resources"", ""applicationDisplayName"": ""Jamf Protect Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. 
Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Unified Logs Stream Name"", ""value"": ""Custom-jamfprotectunifiedlogs""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Telemetry Stream Name"", ""value"": ""Custom-jamfprotecttelemetryv2""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Alerts Stream Name"", ""value"": ""Custom-jamfprotectalerts""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in 
Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect/Data%20Connectors/JamfProtect_ccp/connectorDefinition.json","true" +"","Joshua-Cyberiskvision","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Joshua-Cyberiskvision","almavivaspa1636563933762","joshua-cyberiskvision","2022-01-10","2022-01-10","","Joshua Cyberiskvision","Partner","https://www.cyberiskvision.com/","","domains","","","","","","","","false" +"Syslog","Juniper SRX","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Juniper%20SRX","azuresentinel","azure-sentinel-solution-junipersrx","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JuniperSRX","Juniper","[Deprecated] Juniper SRX","The [Juniper SRX](https://www.juniper.net/us/en/products-services/security/srx-series/) connector allows you to easily connect your Juniper SRX logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias JuniperSRX and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Juniper%20SRX/Parsers/JuniperSRX.txt), on the second line of the query, enter the hostname(s) of your JuniperSRX device(s) and any other unique identifiers for the logstream. 
The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the Juniper SRX"", ""description"": ""1. Follow these instructions to configure the Juniper SRX to forward syslog: \n - [Traffic Logs (Security Policy Logs)](https://kb.juniper.net/InfoCenter/index?page=content&id=KB16509&actp=METADATA) \n - [System Logs](https://kb.juniper.net/InfoCenter/index?page=content&id=kb16502)\n2. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Juniper SRX"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Juniper%20SRX/Data%20Connectors/Connector_Syslog_JuniperSRX.json","true" +"JuniperIDP_CL","JuniperIDP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JuniperIDP","azuresentinel","azure-sentinel-solution-juniperidp","2021-03-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JuniperIDP","Juniper","[Deprecated] Juniper IDP","The [Juniper](https://www.juniper.net/) IDP data connector provides the capability to ingest [Juniper IDP](https://www.juniper.net/documentation/us/en/software/junos/idp-policy/topics/topic-map/security-idp-overview.html) events into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on Kusto Function to work as expected [**JuniperIDP**](https://aka.ms/sentinel-JuniperIDP-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** IDP OS 5.1 and above is supported by this data connector."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Juniper IDP logs into Microsoft Sentinel. This configuration enriches events generated by Juniper IDP module to provide visibility on log source information for Juniper IDP logs. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\n1. 
Download config file [juniper_idp.conf](https://aka.ms/sentinel-JuniperIDP-conf).\n2. Login to the server where you have installed Azure Log Analytics agent.\n3. Copy juniper_idp.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder.\n4. Edit juniper_idp.conf as follows:\n\n\t i. change the listen port for receiving logs based on your configuration (line 3)\n\n\t ii. replace **workspace_id** with real value of your Workspace ID (lines 58,59,60,63)\n5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart\n6. To configure a remote syslog destination, please reference the [SRX Getting Started - Configure System Logging](https://kb.juniper.net/InfoCenter/index?page=content&id=kb16502)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JuniperIDP/Data%20Connectors/Connector_LogAnalytics_agent_JuniperIDP.json","true" +"","KQL Training","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/KQL%20Training","microsoftsentinelcommunity","azure-sentinel-solution-kqltraining","2022-11-30","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","","","false" +"KeeperSecurityEventNewLogs_CL","Keeper Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Keeper%20Security","keepersecurity","keeper-security-integration","2025-06-03","2025-06-03","","Keeper Security","Partner","https://www.keepersecurity.com","","domains","KeeperSecurityPush2","Keeper Security","Keeper Security Push Connector","The [Keeper Security](https://keepersecurity.com) connector provides the capability to read raw event data from Keeper Security in Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Keeper Security uses in a Microsoft Analytics Workspace, if the [data forwarding](https://docs.keepersecurity.com/docs/data-forwarding) option is enabled in Keeper Security then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. 
This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Keeper Security connector resources"", ""applicationDisplayName"": ""Keeper Security Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Events Logs Stream Name"", ""value"": ""Custom-KeeperSecurityEventNewLogs""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Update Keeper Admin Console"", ""description"": ""Configure the Keeper Admin Console with the Azure connection details to enable data forwarding to Microsoft Sentinel."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configure Azure Monitor Logs in Keeper Admin Console\n\nIn the [Keeper Admin Console](https://keepersecurity.com/console/), login as the Keeper Administrator. Then go to **Reporting & Alerts** and select **Azure Monitor Logs**.\n\nProvide the following information from Step 2 above into the Admin Console:\n\n- **Azure Tenant ID**: You can find this from Azure's \""Subscriptions\"" area.\n- **Application (client) ID**: This is located in the App registration (KeeperLogging) overview screen\n- **Client Secret Value**: This is the Client Secret Value from the app registration secrets.\n- **Endpoint URL**: This is a URL that is created in the following specific format:\n `https:///dataCollectionRules//streams/
?api-version=2023-01-01`\n\nTo assemble the Endpoint URL:\n\n- **** This comes from Step 2 above\n- **** From the Data Collector Rule, copy the \""Immutable Id\"" value, e.g. `dcr-xxxxxxx`\n- **
** This is the table name created by Azure, e.g. `Custom-KeeperSecurityEventNewLogs`\n\nExample: `https:///dataCollectionRules//streams/Custom-KeeperSecurityEventNewLogs?api-version=2023-01-01`""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Keeper%20Security/Data%20Connectors/KeeperSecurity_ccp/KepperSecurity_Definition.json","true" +"LastPassNativePoller_CL","LastPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/LastPass","thecollectiveconsultingbv1584980370320","lastpass-enterprise-monitoring-solution","2021-10-20","2022-01-12","","The Collective Consulting","Partner","https://thecollective.eu","","domains","LastPass_Polling","The Collective Consulting BV","LastPass Enterprise - Reporting (Polling CCP)","The [LastPass Enterprise](https://www.lastpass.com/products/enterprise-password-management-and-sso) connector provides the capability to LastPass reporting (audit) logs into Microsoft Sentinel. 
The connector provides visibility into logins and activity within LastPass (such as reading and removing passwords).","[{""title"": ""Connect LastPass Enterprise to Microsoft Sentinel"", ""description"": ""Provide the LastPass Provisioning API Key."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""cid"", ""requestObjectKey"": ""queryParametersTemplate"", ""placeHolderName"": ""{{cidPlaceHolder}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""LastPass API Key and CID"", ""description"": ""A LastPass API key and CID are required. [See the documentation to learn more about LastPass API](https://support.logmeininc.com/lastpass/help/use-the-lastpass-provisioning-api-lp010068).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/LastPass/Data%20Connectors/LastPassAPIConnector.json","true" +"","Legacy IOC based Threat Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Legacy%20IOC%20based%20Threat%20Protection","azuresentinel","azure-sentinel-solution-ioclegacy","2022-12-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Lookout_CL","Lookout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout","lookoutinc","lookout_mtd_sentinel","2021-10-18","","","Lookout","Partner","https://www.lookout.com/support","","domains","LookoutAPI","Lookout","[DEPRECATED] Lookout","The [Lookout](https://lookout.com) data connector provides the capability to ingest 
[Lookout](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#commoneventfields) events into Microsoft Sentinel through the Mobile Risk API. Refer to [API documentation](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide) for more information. The [Lookout](https://lookout.com) data connector provides ability to get events which helps to examine potential security risks and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This [Lookout](https://lookout.com) data connector uses Azure Functions to connect to the Mobile Risk API to pull its events into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**LookoutEvents**](https://aka.ms/sentinel-lookoutapi-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Mobile Risk API**\n\n [Follow the instructions](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#authenticatingwiththemobileriskapi) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Follow below mentioned instructions to deploy the [Lookout](https://lookout.com) data connector and the associated Azure Function**\n\n>**IMPORTANT:** Before starting the deployment of the [Lookout](https://lookout.com) data connector, make sure to have the Workspace ID and Workspace Key ready (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Follow below steps for automated deployment of the [Lookout](https://lookout.com) data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-lookoutapi-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. 
\n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **Function Name**, **Workspace ID**,**Workspace Key**,**Enterprise Name** & **Api Key** and deploy. \n4. Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mobile Risk API Credentials/permissions"", ""description"": ""**EnterpriseName** & **ApiKey** are required for Mobile Risk API. [See the documentation to learn more about API](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide). 
Check all [requirements and follow the instructions](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#authenticatingwiththemobileriskapi) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout/Data%20Connectors/Lookout_API_FunctionApp.json","true" +"LookoutMtdV2_CL","Lookout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout","lookoutinc","lookout_mtd_sentinel","2021-10-18","","","Lookout","Partner","https://www.lookout.com/support","","domains","LookoutStreaming_Definition","Microsoft","Lookout Mobile Threat Detection Connector (via Codeless Connector Framework) (Preview)","The [Lookout Mobile Threat Detection](https://lookout.com) data connector provides the capability to ingest events related to mobile security risks into Microsoft Sentinel through the Mobile Risk API. Refer to [API documentation](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide) for more information. This connector helps you examine potential security risks detected in mobile devices.","[{""title"": ""Connect Lookout Mobile Threat Defence connector to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""Before connecting to Lookout, ensure the following prerequisites are completed.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. **ApiKey** is required for Mobile Threat Detection API. See the [documentation](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide) to learn more about API. 
Check all requirements and follow the [instructions](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#authenticatingwiththemobileriskapi) for obtaining credentials.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API key"", ""placeholder"": ""Enter your API key "", ""type"": ""password"", ""name"": ""applicationKey"", ""required"": true}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": false, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout/Data%20Connectors/LookoutStreamingConnector_ccp/LookoutStreaming_DataConnectorDefinition.json","true" +"LookoutCloudSecurity_CL","Lookout Cloud Security Platform for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout%20Cloud%20Security%20Platform%20for%20Microsoft%20Sentinel","lookoutinc","lookout_cloudsecurity_sentinel","2023-02-17","","","Lookout","Partner","https://www.lookout.com/support","","domains","LookoutCloudSecurityDataConnector","Lookout","Lookout Cloud Security for Microsoft Sentinel","This connector uses a Agari REST API connection to push data into Microsoft Sentinel Log Analytics.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Agari REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**Step-by-Step Instructions**\n\n As a prerequisite to this integration, first, you need to configure an API client on Lookout's Management Console. From the Management Console, you can add one or more clients and configure the appropriate permissions and actions for each. \n\n 1. Name - The name given to this client. \n\n 2. Client ID - the unique ID that was provided for this client. \n\n 3. Permissions - The permissions enabled for this client. The permissions you check are those that the client will be allowed to access. The listed options are Activity, Violation, Anomaly, Insights, and Profile \n\n 4. Service URL - The URL used to access this client.It must start with https:// \n\n 5. Authorized IPs - The valid IP address or addresses that apply to this client. \n\n 6. Actions - The actions you can take for this client. Click the icon for the action you want to perform. Editing client information, displaying the client secret, or deleting the client. \n\n **To add a new API client:** \n\n 1. Go to Administration > Enterprise Integration > API Clients and click New. \n\n 2. Enter a Name (required) and a Description (optional). \n\n 3. Enter the Client ID that was provided to you. \n\n 4. Select one or more Permissions from the dropdown list. \n\n 5. Enter one or more Authorized IP addresses for this client. Separate each address with a comma.\n\n 6. Click Save. \n\n When prompted, copy the string for the client's secret. 
You will need this information (along with the client ID) to authenticate to the API gateway. ""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-LookoutCS-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Lookout Client ID**, **Lookout Client Secret**, **Lookout Base url**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-Lookout-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tLookoutClientId\n\t\tLookoutApiSecret\n\t\tBaseurl\n\t\tWorkspaceID\n\t\tPrimaryKey\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout%20Cloud%20Security%20Platform%20for%20Microsoft%20Sentinel/Data%20Connectors/LookoutCSConnector/LookoutCloudSecurityConnector_API_FunctionApp.json","true" +"ThreatIntelIndicators","Lumen Defender Threat Feed","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lumen%20Defender%20Threat%20Feed","centurylink","azure-sentinel-solution-lumen-defender-threat-feed","2025-09-12","2025-09-12","","Lumen Technologies, Inc.","Partner","https://www.lumen.com/en-us/contact-us/support.html","","domains","LumenThreatFeedConnector","Lumen Technologies, Inc.","Lumen Defender Threat Feed Data Connector","The [Lumen Defender Threat Feed](https://bll-analytics.mss.lumen.com/analytics) connector provides the capability to ingest STIX-formatted threat intelligence indicators from Lumen's Black Lotus Labs research team into Microsoft Sentinel. The connector automatically downloads and uploads daily threat intelligence indicators including IPv4 addresses and domains to the ThreatIntelIndicators table via the STIX Objects Upload API.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions with Durable Functions to connect to the Lumen Defender Threat Feed API and upload threat intelligence indicators to Microsoft Sentinel via the STIX Objects API. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": ""Configuration"", ""description"": ""**STEP 1 - Obtain Lumen Defender Threat Feed API Key**\n\n1. [Contact Lumen](mailto:DefenderThreatFeedSales@Lumen.com?subject=API%20Access%20Request) to obtain API access to our Threat Feed API service\n2. 
Obtain your API key for authentication.""}, {""title"": """", ""description"": ""**STEP 2 - Configure Azure Entra ID Application and gather information**\n\n1. Create an Entra application. [See the documentation for a guide to registering an application in Microsoft Entra ID.](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app)\n2. Create a client secret and note the Application ID, Tenant ID, and Client Secret\n4. Assign the **Microsoft Sentinel Contributor** role to the application on your Microsoft Sentinel Log Analytics Workspace\n5. Make note of your Workspace ID, as well as the App Insights Workspace Resource ID, which can be obtained from the overview page of the Log Analytics Workspace for your Microsoft Sentinel instance. Click on the \u201cJSON View\u201d link in the top right and the Resource ID will be displayed at the top with a copy button."", ""instructions"": [{""parameters"": {""fillWith"": [""TenantId""], ""label"": ""Tenant ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**STEP 3 - Enable the Threat Intelligence Upload Indicators API (Preview) data connector in Microsoft Sentinel**\n\n1. Deploy the **Threat Intelligence (New) Solution**, which includes the **Threat Intelligence Upload Indicators API (Preview)**\n2. Browse to the Content Hub, find and select the **Threat Intelligence (NEW)** solution.\n3. Select the **Install/Update** button.""}, {""title"": """", ""description"": ""**STEP 4 - Deploy the Azure Function**\n\n**IMPORTANT:** Before deploying the Lumen Defender Threat Feed connector, have the Tenant ID, Workspace ID, App Insights Workspace Resource ID, Azure Entra application details (Client ID, Client Secret), and Lumen API key readily available.\n\n1. 
Click the Deploy to Azure button.\n\n[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FLumen%20Defender%20Threat%20Feed%2FData%2520Connectors%2FLumenThreatFeed%2Fazuredeploy_Connector_LumenThreatFeed_AzureFunction.json)\n\n2. Fill in the appropriate values for each parameter:\n\n- Subscription: Confirm the correct subscription is selected or use the dropdown to change your selection\n- Resource Group: Select the resource group to be used by the Function App and related resources\n- Function Name: Enter a globally unique name with an 11-character limit. Adhere to your organization\u2019s naming convention and ensure the name is globally unique since it is used (along with the uniqueString() function) to identify the ARM template being deployed.\n- Workspace ID: Found in the \""Overview\"" tab for the Log Analytics Workspace of the Microsoft Sentinel instance and provided for convenience on the connector information page.\n- Lumen API Key: Obtain an API key through Lumen support\n- Lumen Base URL: Filled in automatically and should generally not be changed. This URL contains API endpoints used by the connector\n- Tenant ID: Obtained from the Entra App Registration overview page for the registered application (listed as Directory ID) and can also be obtained from the Tenant Information page in Azure\n- Client ID: Obtained from the Entra App Registration overview page for the registered application (listed as Application ID)\n- Client Secret: Obtained when the secret is created during the app registration process. It can only be viewed when first created and is hidden permanently afterwards. Rerun the app registration process to obtain a new Client Secret if necessary.\n- App Insights Workspace Resource ID: Obtained from the overview page of the Log Analytics Workspace for your Microsoft Sentinel instance. 
Click on the \""JSON View\"" link in the top right and the Resource ID will be displayed at the top with a copy button.\n- Blob Container Name: Use the default name unless otherwise required. Azure Blob Storage is used for temporary storage and processing of threat indicators.""}, {""title"": """", ""description"": ""**STEP 5 - Verify Deployment**\n\n1. The connector polls for indicator updates every 15 minutes.\n2. Monitor the Function App logs in the Azure Portal to verify successful execution\n3. After the app performs its first run, review the indicators ingested by either viewing the \u201cLumen Defender Threat Feed Overview\u201d workbook or viewing the \u201cThreat Intelligence\u201d section in Microsoft Sentinel. In Microsoft Sentinel \u201cThreat Intelligence\u201d, filter for source \u201cLumen\u201d to display only Lumen generated indicators.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and write permissions on the Log Analytics workspace are required."", ""providerDisplayName"": ""Log Analytics Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": false}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Azure Entra App Registration"", ""description"": ""An Azure Entra application registration with the Microsoft Sentinel Contributor role assigned is required for STIX Objects API access. 
[See the documentation to learn more about Azure Entra applications](https://docs.microsoft.com/azure/active-directory/develop/quickstart-register-app).""}, {""name"": ""Microsoft Sentinel Contributor Role"", ""description"": ""Microsoft Sentinel Contributor role is required for the Azure Entra application to upload threat intelligence indicators.""}, {""name"": ""Lumen Defender Threat Feed API Key"", ""description"": ""A Lumen Defender Threat Feed API Key is required for accessing threat intelligence data. [Contact Lumen for API access](mailto:DefenderThreatFeedSales@Lumen.com?subject=API%20Access%20Request).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lumen%20Defender%20Threat%20Feed/Data%20Connectors/LumenThreatFeed/LumenThreatFeedConnector_ConnectorUI.json","true" +"ThreatIntelligenceIndicator","MISP2Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MISP2Sentinel","microsoftsentinelcommunity","azure-sentinel-solution-misp2sentinel","2023-07-29","2023-07-29","","Community","Community","https://github.com/cudeso/misp2sentinel","","domains,verticals","MISP2SentinelConnector","MISP project & cudeso.be","MISP2Sentinel","This solution installs the MISP2Sentinel connector that allows you to automatically push threat indicators from MISP to Microsoft Sentinel via the Upload Indicators REST API. 
After installing the solution, configure and enable this data connector by following guidance in Manage solution view.","[{""title"": ""Installation and setup instructions"", ""description"": ""Use the documentation from this GitHub repository to install and configure the MISP to Microsoft Sentinel connector: \n\nhttps://github.com/cudeso/misp2sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MISP2Sentinel/Data%20Connectors/MISP2SentinelConnector_UploadIndicatorsAPI.json","true" +"MailGuard365_Threats_CL","MailGuard 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailGuard%20365","mailguardptylimited","microsoft-sentinel-solution-mailguard365","2023-05-09","2023-06-08","","MailGuard 365","Partner","https://www.mailguard365.com/support/","","domains","MailGuard365","MailGuard365","MailGuard 365","MailGuard 365 Enhanced Email Security for Microsoft 365. Exclusive to the Microsoft marketplace, MailGuard 365 is integrated with Microsoft 365 security (incl. Defender) for enhanced protection against advanced email threats like phishing, ransomware and sophisticated BEC attacks.","[{""title"": ""Configure and connect MailGuard 365"", ""description"": ""1. In the MailGuard 365 Console, click **Settings** on the navigation bar.\n2. Click the **Integrations** tab.\n3. Click the **Enable Microsoft Sentinel**.\n4. Enter your workspace id and primary key from the fields below, click **Finish**.\n5. 
For additional instructions, please contact MailGuard 365 support."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailGuard%20365/Data%20Connectors/MailGuard365.json","true" +"MailRiskEventEmails_CL","MailRisk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailRisk","securepracticeas1650887373770","microsoft-sentinel-solution-mailrisk","2023-03-16","2025-10-27","","Secure Practice","Partner","https://securepractice.co/support","","domains","SecurePracticeMailRiskConnector","Secure Practice","MailRisk by Secure Practice","The MailRisk by Secure Practice connector allows you to ingest email threat intelligence data from the MailRisk API into Microsoft Sentinel. This connector provides visibility into reported emails, risk assessments, and security events related to email threats.","[{""title"": ""1. 
Obtain Secure Practice API Credentials"", ""description"": ""Log in to your Secure Practice account and generate an API Key and API Secret if you haven't already.""}, {""title"": ""2. Connect to MailRisk API"", ""description"": ""Enter your Secure Practice API credentials below. The credentials will be securely stored and used to authenticate API requests."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your Secure Practice API Key"", ""type"": ""text"", ""name"": ""apiKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Secret"", ""placeholder"": ""Enter your Secure Practice API Secret"", ""type"": ""password"", ""name"": ""apiSecret""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": false, ""action"": false}}], ""customs"": [{""name"": ""API credentials"", ""description"": ""Your Secure Practice API key pair is also needed, which are created in the [settings in the admin portal](https://manage.securepractice.co/settings/security). 
Generate a new key pair with description `Microsoft Sentinel`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailRisk/Data%20Connectors/MailRisk_CCP/MailRisk_ConnectorDefinition.json","true" +"","Malware Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Malware%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-malwareprotection","2023-09-25","2023-09-25","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","MarkLogicAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MarkLogicAudit","azuresentinel","azure-sentinel-solution-marklogicaudit","2022-08-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","MaturityModelForEventLogManagementM2131","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MaturityModelForEventLogManagementM2131","azuresentinel","azure-sentinel-solution-maturitymodelforeventlogma","2021-12-05","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Syslog","McAfee Network Security Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20Network%20Security%20Platform","azuresentinel","azure-sentinel-solution-mcafeensp","2021-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","McAfeeNSP","McAfee","[Deprecated] McAfee Network Security Platform","The [McAfee® Network Security Platform](https://www.mcafee.com/enterprise/en-us/products/network-security-platform.html) data connector provides the capability to ingest [McAfee® Network Security Platform events](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-integration-guide-unmanaged/page/GUID-8C706BE9-6AC9-4641-8A53-8910B51207D8.html) into Microsoft Sentinel. 
Refer to [McAfee® Network Security Platform](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-integration-guide-unmanaged/page/GUID-F7D281EC-1CC9-4962-A7A3-5A9D9584670E.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**McAfeeNSPEvent**](https://aka.ms/sentinel-mcafeensp-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using McAfee\u00ae Network Security Platform version: 10.1.x"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the McAfee\u00ae Network Security Platform logs are forwarded.\n\n> Logs from McAfee\u00ae Network Security Platform Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", 
""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure McAfee\u00ae Network Security Platform event forwarding"", ""description"": ""Follow the configuration steps below to get McAfee\u00ae Network Security Platform logs into Microsoft Sentinel.\n1. [Follow these instructions](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-product-guide/page/GUID-E4A687B0-FAFB-4170-AC94-1D968A10380F.html) to forward alerts from the Manager to a syslog server.\n2. Add a syslog notification profile, [more details here](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-product-guide/page/GUID-5BADD5D7-21AE-4E3B-AEE2-A079F3FD6A38.html). This is mandatory. 
While creating profile, to make sure that events are formatted correctly, enter the following text in the Message text box:\n\t\t:|SENSOR_ALERT_UUID|ALERT_TYPE|ATTACK_TIME|ATTACK_NAME|ATTACK_ID\n\t\t|ATTACK_SEVERITY|ATTACK_SIGNATURE|ATTACK_CONFIDENCE|ADMIN_DOMAIN|SENSOR_NAME|INTERFACE\n\t\t|SOURCE_IP|SOURCE_PORT|DESTINATION_IP|DESTINATION_PORT|CATEGORY|SUB_CATEGORY\n\t\t|DIRECTION|RESULT_STATUS|DETECTION_MECHANISM|APPLICATION_PROTOCOL|NETWORK_PROTOCOL|""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20Network%20Security%20Platform/Data%20Connectors/McAfeeNSP.json","true" +"Syslog","McAfee ePolicy Orchestrator","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20ePolicy%20Orchestrator","azuresentinel","azure-sentinel-solution-mcafeeepo","2021-03-25","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","McAfeeePO","McAfee","[Deprecated] McAfee ePolicy Orchestrator (ePO)","The McAfee ePolicy Orchestrator data connector provides the capability to ingest [McAfee ePO](https://www.mcafee.com/enterprise/en-us/products/epolicy-orchestrator.html) events into Microsoft Sentinel through the syslog. Refer to [documentation](https://docs.mcafee.com/bundle/epolicy-orchestrator-landing/page/GUID-0C40020F-5B7F-4549-B9CC-0E017BC8797F.html) for more information.","[{""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected [**McAfeeEPOEvent**](https://aka.ms/sentinel-McAfeeePO-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure McAfee ePolicy Orchestrator event forwarding to Syslog server"", ""description"": ""[Follow these instructions](https://kcm.trellix.com/corporate/index?page=content&id=KB87927) to add register syslog server.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20ePolicy%20Orchestrator/Data%20Connectors/Connector_McAfee_ePO.json","true" +"","Microsoft 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365","azuresentinel","azure-sentinel-solution-office365","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"FinanceOperationsActivity_CL","Microsoft Business Applications","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Business%20Applications","sentinel4dynamics365","powerplatform","2023-04-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Dynamics365Finance","Microsoft","Dynamics 365 Finance and Operations","Dynamics 365 for Finance and Operations is a comprehensive Enterprise Resource Planning (ERP) solution that combines financial and operational capabilities to help businesses manage their day-to-day operations. It offers a range of features that enable businesses to streamline workflows, automate tasks, and gain insights into operational performance.

The Dynamics 365 Finance and Operations data connector ingests Dynamics 365 Finance and Operations admin activities and audit logs as well as user business process and application activities logs into Microsoft Sentinel.","[{""description"": "">Connectivity to Finance and Operations requires a Microsoft Entra app registration (client ID and secret). You'll also need the Microsoft Entra tenant ID and the Finance Operations Organization URL.""}, {""description"": ""To enable data collection, create a role in Dynamics 365 Finance and Operations with permissions to view the Database Log entity. Assign this role to a dedicated Finance and Operations user, mapped to the client ID of a Microsoft Entra app registration. Follow these steps to complete the process:""}, {""title"": ""Step 1 - Microsoft Entra app registration"", ""description"": ""1. Navigate to the [Microsoft Entra portal](https://entra.microsoft.com). \n2. Under Applications, click on **App Registrations** and create a new app registration (leave all defaults).\n3. Open the new app registration and create a new secret.\n4. Retain the **Tenant ID**, **Application (client) ID**, and **Client secret** for later use.""}, {""title"": ""Step 2 - Create a role for data collection in Finance and Operations"", ""description"": ""1. In the Finance and Operations portal, navigate to **Workspaces > System administration** and click **Security Configuration**\n2. Under **Roles** click **Create new** and give the new role a name e.g. Database Log Viewer.\n3. Select the new role in the list of roles and click **Privileges** and than **Add references**.\n4. Select **Database log Entity View** from the list of privileges.\n5. Click on **Unpublished objects** and then **Publish all** to publish the role.""}, {""title"": ""Step 3 - Create a user for data collection in Finance and Operations"", ""description"": ""1. In the Finance and Operations portal, navigate to **Modules > System administration** and click **Users**\n2. 
Create a new user and assign the role created in the previous step to the user.""}, {""title"": ""Step 4 - Register the Microsoft Entra app in Finance and Operations"", ""description"": ""1. In the F&O portal, navigate to **System administration > Setup > Microsoft Entra applications** (Azure Active Directory applications)\n2. Create a new entry in the table. In the **Client Id** field, enter the application ID of the app registered in Step 1.\n3. In the **Name** field, enter a name for the application.\n4. In the **User ID** field, select the user ID created in the previous step.""}, {""description"": ""Connect using client credentials"", ""title"": ""Connect events from Dyanmics 365 Finance and Operations to Microsoft Sentinel"", ""instructions"": [{""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""label"": ""Add environment"", ""isPrimary"": true, ""title"": ""Dynamics 365 Finance and Operations connection"", ""instructionSteps"": [{""title"": ""Environment details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Microsoft Entra tenant ID."", ""placeholder"": ""Tenant ID (GUID)"", ""type"": ""text"", ""name"": ""tenantId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""App registration client ID"", ""placeholder"": ""Finance and Operations client ID"", ""type"": ""text"", ""name"": ""clientId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""App registration client secret"", ""placeholder"": ""Finance and Operations client secret"", ""type"": ""password"", ""name"": ""clientSecret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Finance and Operations organization URL"", ""placeholder"": ""https://dynamics-dev.axcloud.dynamics.com"", ""type"": ""text"", ""name"": ""auditHost""}}]}]}}]}, {""title"": ""Organizations"", ""description"": ""Each row represents an Finance and Operations connection"", ""instructions"": [{""type"": ""DataConnectorsGrid"", 
""parameters"": {""mapping"": [{""columnName"": ""Environment URL"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra app registration"", ""description"": ""Application client ID and secret used to access Dynamics 365 Finance and Operations.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Business%20Applications/Data%20Connectors/DynamicsFinOpsPollerConnector/DynamicsFinOps_DataConnectorDefinition.json","true" +"LLMActivity","Microsoft Copilot","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Copilot","azuresentinel","azure-sentinel-solution-microsoftcopilot","2025-10-01","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","MicrosoftCopilot","Microsoft","Microsoft Copilot","The Microsoft Copilot logs connector in Microsoft Sentinel enables the seamless ingestion of Copilot-generated activity logs into Microsoft Sentinel for advanced threat detection, investigation, and response. 
It collects telemetry from Microsoft Copilot services - such as usage data, prompts and system responses - and ingests into Microsoft Sentinel, allowing security teams to monitor for misuse, detect anomalies, and maintain compliance with organizational policies.","[{""title"": ""Connect Microsoft Copilot audit logs to Microsoft Sentinel"", ""description"": ""This connector uses the Office Management API to get your Microsoft Copilot audit logs. The logs will be stored and processed in your existing Microsoft Sentinel workspace. You can find the data in the **LLMActivity** table."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Tenant Permissions"", ""description"": ""'Security Administrator' or 'Global Administrator' on the workspace's tenant.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Copilot/Data%20Connectors/MicrosoftCopilot_ConnectorDefinition.json","true" +"","Microsoft Defender For Identity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20For%20Identity","azuresentinel","azure-sentinel-solution-mdefenderforidentity","2022-04-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Microsoft Defender Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20Threat%20Intelligence","azuresentinel","azure-sentinel-solution-microsoftdefenderthreatint","2023-03-23","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"SecurityAlert","Microsoft Defender for Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud","azuresentinel","azure-sentinel-solution-microsoftdefenderforcloud","2022-05-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftDefenderForCloudTenantBased","Microsoft","Tenant-based Microsoft Defender for Cloud","Microsoft Defender for Cloud is a security management tool that allows you to detect and quickly respond to threats across Azure, hybrid, and multi-cloud workloads. This connector allows you to stream your MDC security alerts from Microsoft 365 Defender into Microsoft Sentinel, so you can can leverage the advantages of XDR correlations connecting the dots across your cloud resources, devices and identities and view the data in workbooks, queries and investigate and respond to incidents. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269832&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Tenant-based Microsoft Defender for Cloud to Microsoft Sentinel"", ""description"": ""After connecting this connector, **all** your Microsoft Defender for Cloud subscriptions' alerts will be sent to this Microsoft Sentinel workspace.\n\n> Your Microsoft Defender for Cloud alerts are connected to stream through the Microsoft 365 Defender. To benefit from automated grouping of the alerts into incidents, connect the Microsoft 365 Defender incidents connector. 
Incidents can be viewed in the incidents queue."", ""instructions"": [{""parameters"": {""title"": ""Tenant-based Microsoft Defender for Cloud"", ""connectorKind"": ""MicrosoftDefenderForCloudTenantBased"", ""enable"": true, ""newPipelineEnabledFeatureFlagConfig"": {""feature"": ""MdcAlertsByMtp"", ""featureStates"": {""1"": 2, ""2"": 2, ""3"": 2, ""4"": 2, ""5"": 2}}, ""infoBoxMessage"": ""Your Microsoft Defender for Cloud alerts are connected to stream through the Microsoft 365 Defender. To benefit from automated grouping of the alerts into incidents, connect the Microsoft 365 Defender incidents connector. Incidents can be viewed in the incidents queue"", ""shouldAlwaysDisplayInfoMessage"": true}, ""type"": ""MicrosoftDefenderForCloudTenantBased""}]}]","{""tenant"": [""SecurityAdmin"", ""GlobalAdmin""], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenantMember"": true}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud/Data%20Connectors/MicrosoftDefenderForCloudTenantBased.json","true" +"","Microsoft Defender for Cloud Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps","azuresentinel","azure-sentinel-solution-microsoftdefendercloudapps","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"SecurityAlert","Microsoft Defender for Office 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Office%20365","azuresentinel","azure-sentinel-solution-microsoftdefenderforo365","2022-05-17","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com/","","domains","OfficeATP","Microsoft","Microsoft Defender for Office 365 (Preview)","Microsoft Defender for Office 365 safeguards your organization against malicious threats posed by email messages, links (URLs) and collaboration tools. By ingesting Microsoft Defender for Office 365 alerts into Microsoft Sentinel, you can incorporate information about email- and URL-based threats into your broader risk analysis and build response scenarios accordingly.

The following types of alerts will be imported:

- A potentially malicious URL click was detected
- Email messages containing malware removed after delivery
- Email messages containing phish URLs removed after delivery
- Email reported by user as malware or phish
- Suspicious email sending patterns detected
- User restricted from sending email

These alerts can be seen by Office customers in the ** Office Security and Compliance Center**.

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219942&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Defender for Office 365 alerts to Microsoft Sentinel"", ""description"": ""Connecting Microsoft Defender for Office 365 will cause your data that is collected by Microsoft Defender for Office 365 service to be stored and processed in the location that you have configured your Microsoft Sentinel workspace."", ""instructions"": [{""parameters"": {""connectorKind"": ""OfficeATP"", ""title"": ""Microsoft Defender for Office 365"", ""enable"": true}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""licenses"": [""OfficeATP""]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Office%20365/Data%20Connectors/template_OfficeATP.json","true" +"","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Microsoft Entra ID Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Protection","azuresentinel","azure-sentinel-solution-azureactivedirectoryip","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"Event","Microsoft Exchange Security - Exchange 
On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers"", ""instructions"": [{""parameters"": {""title"": ""Select which agent you want to install in your servers to collect logs:"", ""instructionSteps"": [{""title"": ""[Prefered] Azure Monitor Agent via Azure Arc"", ""description"": ""**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""title"": ""Install Azure Log Analytics Agent (Deprecated on 31/08/2024)"", ""description"": ""1. Download the Azure Log Analytics Agent and choose the deployment method in the below link."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Deploy log injestion following choosed options"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 1] MS Exchange Management Log collection"", ""description"": ""Select how to stream MS Exchange Admin Audit event logs"", ""instructions"": [{""parameters"": {""title"": ""MS Exchange Admin Audit event logs"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. 
In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace **Legacy agents management**, select **Windows Event logs**.\n2. Click **Add Windows event log** and enter **MSExchange Management** as log name.\n3. Collect Error, Warning and Information types\n4. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""Select how to stream Security/Application/System logs of Exchange Servers"", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**.\n2. Click **Add Windows event log** and search **Application** as log name.\n3. Click **Add Windows event log** and search **System** as log name.\n4. Collect Error (for all), Warning (for all) and Information (for System) types\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 3 and 4] Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. 
If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. 
Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**.\n2. Check **Collect W3C format IIS log files**\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 6] Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. 
In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n ['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n 
['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']* \n7. Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next\n3. Select type **Windows** and enter the path **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log**. Click Next.\n4. Enter **MessageTrackingLog** as Table name and click Next.\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 
'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime\n*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime* \n7. 
Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next\n3. Select type **Windows** and enter all the following paths **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log** and **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log** . Click Next.\n4. Enter **ExchangeHttpProxy** as Table name and click Next.\n5. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" +"ExchangeHttpProxy_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. 
This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers"", ""instructions"": [{""parameters"": {""title"": ""Select which agent you want to install in your servers to collect logs:"", ""instructionSteps"": [{""title"": ""[Prefered] Azure Monitor Agent via Azure Arc"", ""description"": ""**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""title"": ""Install Azure Log Analytics Agent (Deprecated on 31/08/2024)"", ""description"": ""1. Download the Azure Log Analytics Agent and choose the deployment method in the below link."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Deploy log injestion following choosed options"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 1] MS Exchange Management Log collection"", ""description"": ""Select how to stream MS Exchange Admin Audit event logs"", ""instructions"": [{""parameters"": {""title"": ""MS Exchange Admin Audit event logs"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. 
In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace **Legacy agents management**, select **Windows Event logs**.\n2. Click **Add Windows event log** and enter **MSExchange Management** as log name.\n3. Collect Error, Warning and Information types\n4. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""Select how to stream Security/Application/System logs of Exchange Servers"", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**.\n2. Click **Add Windows event log** and search **Application** as log name.\n3. Click **Add Windows event log** and search **System** as log name.\n4. Collect Error (for all), Warning (for all) and Information (for System) types\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 3 and 4] Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. 
If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. 
Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**.\n2. Check **Collect W3C format IIS log files**\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 6] Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. 
In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n ['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n 
['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']* \n7. Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next\n3. Select type **Windows** and enter the path **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log**. Click Next.\n4. Enter **MessageTrackingLog** as Table name and click Next.\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 
'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime\n*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime* \n7. 
Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next\n3. Select type **Windows** and enter all the following paths **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log** and **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log** . Click Next.\n4. Enter **ExchangeHttpProxy** as Table name and click Next.\n5. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" +"MessageTrackingLog_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. 
This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers"", ""instructions"": [{""parameters"": {""title"": ""Select which agent you want to install in your servers to collect logs:"", ""instructionSteps"": [{""title"": ""[Prefered] Azure Monitor Agent via Azure Arc"", ""description"": ""**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""title"": ""Install Azure Log Analytics Agent (Deprecated on 31/08/2024)"", ""description"": ""1. Download the Azure Log Analytics Agent and choose the deployment method in the below link."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Deploy log injestion following choosed options"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 1] MS Exchange Management Log collection"", ""description"": ""Select how to stream MS Exchange Admin Audit event logs"", ""instructions"": [{""parameters"": {""title"": ""MS Exchange Admin Audit event logs"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. 
In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace **Legacy agents management**, select **Windows Event logs**.\n2. Click **Add Windows event log** and enter **MSExchange Management** as log name.\n3. Collect Error, Warning and Information types\n4. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""Select how to stream Security/Application/System logs of Exchange Servers"", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**.\n2. Click **Add Windows event log** and search **Application** as log name.\n3. Click **Add Windows event log** and search **System** as log name.\n4. Collect Error (for all), Warning (for all) and Information (for System) types\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 3 and 4] Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. 
If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. 
Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**.\n2. Check **Collect W3C format IIS log files**\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 6] Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. 
In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n ['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n 
['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']* \n7. Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next\n3. Select type **Windows** and enter the path **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log**. Click Next.\n4. Enter **MessageTrackingLog** as Table name and click Next.\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 
'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime\n*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime* \n7. 
Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next\n3. Select type **Windows** and enter all the following paths **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log** and **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log** . Click Next.\n4. Enter **ExchangeHttpProxy** as Table name and click Next.\n5. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" +"SecurityEvent","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. 
This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers"", ""instructions"": [{""parameters"": {""title"": ""Select which agent you want to install in your servers to collect logs:"", ""instructionSteps"": [{""title"": ""[Prefered] Azure Monitor Agent via Azure Arc"", ""description"": ""**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""title"": ""Install Azure Log Analytics Agent (Deprecated on 31/08/2024)"", ""description"": ""1. Download the Azure Log Analytics Agent and choose the deployment method in the below link."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Deploy log injestion following choosed options"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 1] MS Exchange Management Log collection"", ""description"": ""Select how to stream MS Exchange Admin Audit event logs"", ""instructions"": [{""parameters"": {""title"": ""MS Exchange Admin Audit event logs"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. 
In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace **Legacy agents management**, select **Windows Event logs**.\n2. Click **Add Windows event log** and enter **MSExchange Management** as log name.\n3. Collect Error, Warning and Information types\n4. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""Select how to stream Security/Application/System logs of Exchange Servers"", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**.\n2. Click **Add Windows event log** and search **Application** as log name.\n3. Click **Add Windows event log** and search **System** as log name.\n4. Collect Error (for all), Warning (for all) and Information (for System) types\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 3 and 4] Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. 
If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. 
Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**.\n2. Check **Collect W3C format IIS log files**\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 6] Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. 
In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n ['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n 
['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']* \n7. Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next\n3. Select type **Windows** and enter the path **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log**. Click Next.\n4. Enter **MessageTrackingLog** as Table name and click Next.\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 
'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime\n*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime* \n7. 
Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next\n3. Select type **Windows** and enter all the following paths **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log** and **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log** . Click Next.\n4. Enter **ExchangeHttpProxy** as Table name and click Next.\n5. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" +"W3CIISLog","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. 
This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers"", ""instructions"": [{""parameters"": {""title"": ""Select which agent you want to install in your servers to collect logs:"", ""instructionSteps"": [{""title"": ""[Prefered] Azure Monitor Agent via Azure Arc"", ""description"": ""**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""title"": ""Install Azure Log Analytics Agent (Deprecated on 31/08/2024)"", ""description"": ""1. Download the Azure Log Analytics Agent and choose the deployment method in the below link."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Deploy log injestion following choosed options"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 1] MS Exchange Management Log collection"", ""description"": ""Select how to stream MS Exchange Admin Audit event logs"", ""instructions"": [{""parameters"": {""title"": ""MS Exchange Admin Audit event logs"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. 
In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace **Legacy agents management**, select **Windows Event logs**.\n2. Click **Add Windows event log** and enter **MSExchange Management** as log name.\n3. Collect Error, Warning and Information types\n4. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""Select how to stream Security/Application/System logs of Exchange Servers"", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**.\n2. Click **Add Windows event log** and search **Application** as log name.\n3. Click **Add Windows event log** and search **System** as log name.\n4. Collect Error (for all), Warning (for all) and Information (for System) types\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 3 and 4] Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. 
If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. 
Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**.\n2. Check **Collect W3C format IIS log files**\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 6] Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. 
In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n ['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n 
['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']* \n7. Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next\n3. Select type **Windows** and enter the path **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log**. Click Next.\n4. Enter **MessageTrackingLog** as Table name and click Next.\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 
'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime\n*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime* \n7. 
Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next\n3. Select type **Windows** and enter all the following paths **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log** and **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log** . Click Next.\n4. Enter **ExchangeHttpProxy** as Table name and click Next.\n5. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" +"ESIExchangeConfig_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeOnPremisesCollector","Microsoft","Exchange Security Insights On-Premises Collector","Connector used to push Exchange On-Premises Security configuration for Microsoft Sentinel Analysis","[{""title"": ""1. 
Install the ESI Collector Script on a server with Exchange Admin PowerShell console"", ""description"": ""This is the script that will collect Exchange Information to push content in Microsoft Sentinel.\n "", ""instructions"": [{""parameters"": {""title"": ""Script Deployment"", ""instructionSteps"": [{""title"": ""Download the latest version of ESI Collector"", ""description"": ""The latest version can be found here : https://aka.ms/ESI-ExchangeCollector-Script. The file to download is CollectExchSecIns.zip""}, {""title"": ""Copy the script folder"", ""description"": ""Unzip the content and copy the script folder on a server where Exchange PowerShell Cmdlets are present.""}, {""title"": ""Unblock the PS1 Scripts"", ""description"": ""Click right on each PS1 Script and go to Properties tab.\n If the script is marked as blocked, unblock it. You can also use the Cmdlet 'Unblock-File *.* in the unzipped folder using PowerShell.""}, {""title"": ""Configure Network Access "", ""description"": ""Ensure that the script can contact Azure Analytics (*.ods.opinsights.azure.com).""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the ESI Collector Script"", ""description"": ""Be sure to be local administrator of the server.\nIn 'Run as Administrator' mode, launch the 'setup.ps1' script to configure the collector.\n Fill the Log Analytics (Microsoft Sentinel) Workspace information.\n Fill the Environment name or leave empty. By default, choose 'Def' as Default analysis. The other choices are for specific usage."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Schedule the ESI Collector Script (If not done by the Install Script due to lack of permission or ignored during installation)"", ""description"": ""The script needs to be scheduled to send Exchange configuration to Microsoft Sentinel.\n We recommend to schedule the script once a day.\n The account used to launch the Script needs to be member of the group Organization Management""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. 
Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Service Account with Organization Management role"", ""description"": ""The service Account that launch the script as scheduled task needs to be Organization Management to be able to retrieve all the needed security Information.""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeOnPremisesCollector.json","true" +"Event","Microsoft Exchange Security - Exchange 
On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt1ExchangeAdminAuditLogsByEventLogs","Microsoft","Microsoft Exchange Admin Audit Logs by Event Logs","[Option 1] - Using Azure Monitor Agent - You can stream all Exchange Audit events from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 1** of the wiki.""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. [Option 1] MS Exchange Management Log collection - MS Exchange Admin Audit event logs by Data Collection Rules"", ""description"": ""The MS Exchange Admin Audit event logs are collected using Data Collection Rules (DCR) and allow to store all Administrative Cmdlets executed in an Exchange environment."", ""instructions"": [{""parameters"": {""title"": ""DCR"", ""instructionSteps"": [{""title"": ""Data Collection Rules Deployment"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template (Prefered)"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. 
Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt1ExchangeAdminAuditLogsByEventLogs.json","true" +"Event","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt2ExchangeServersEventLogs","Microsoft","Microsoft Exchange Logs and Events","[Option 2] - Using Azure Monitor Agent - You can stream all Exchange Security & Application Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to create custom alerts, and improve investigation.","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 2** of the wiki.""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. [Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""The Security/Application/System logs of Exchange Servers are collected using Data Collection Rules (DCR)."", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Enable data collection rule"", ""description"": ""> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template (Prefered method)"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Log Analytics will be deprecated"", ""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt2ExchangeServersEventLogs.json","true" +"SecurityEvent","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt34DomainControllersSecurityEventLogs","Microsoft"," Microsoft Active-Directory Domain Controllers Security Event Logs","[Option 3 & 4] - Using Azure Monitor Agent -You can stream a part or all Domain Controllers Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to create custom alerts, and improve investigation.","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 3 and 4** of the wiki.""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt34DomainControllersSecurityEventLogs.json","true" +"W3CIISLog","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt5ExchangeIISLogs","Microsoft","IIS Logs of Microsoft Exchange Servers","[Option 5] - Using Azure Monitor Agent - You can stream all IIS Logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to create custom alerts, and improve investigation.","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 5** of the wiki.""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Enable data collection rule"", ""description"": ""> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template (Preferred Method)"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 
'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt5ExchangeIISLogs.json","true" +"MessageTrackingLog_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt6ExchangeMessageTrackingLogs","Microsoft","Microsoft Exchange Message Tracking Logs","[Option 6] - Using Azure Monitor Agent - You can stream all Exchange Message Tracking from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. Those logs can be used to track the flow of messages in your Exchange environment. This data connector is based on the option 6 of the [Microsoft Exchange Security wiki](https://aka.ms/ESI_DataConnectorOptions).","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. 
To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 6** of the wiki.""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. 
Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Create Custom Table - Explanation"", ""description"": ""The Custom Table can't be created using the Azure Portal. You need to use an ARM template, a PowerShell Script or another method [described here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/create-custom-table?tabs=azure-powershell-1%2Cazure-portal-2%2Cazure-portal-3#create-a-custom-table).""}, {""title"": ""Create Custom Table using an ARM Template"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-MessageTrackingCustomTable)\n2. Select the preferred **Subscription**, **Resource Group**, **Location** and **Analytic Workspace Name**. \n3. Click **Create** to deploy.""}, {""title"": ""Create Custom Table using PowerShell in Cloud Shell"", ""description"": ""1. From the Azure Portal, open a Cloud Shell.\n2. 
Copy and paste and Execute the following script in the Cloud Shell to create the table.\n\t\t$tableParams = @'\n\t\t{\n\t\t\t\""properties\"": {\n\t\t\t\t\""schema\"": {\n\t\t\t\t\t \""name\"": \""MessageTrackingLog_CL\"",\n\t\t\t\t\t \""columns\"": [\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""directionality\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""reference\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""source\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TimeGenerated\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""datetime\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""clientHostname\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""clientIP\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""connectorId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""customData\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""eventId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""internalMessageId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""logId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""messageId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""messageInfo\"",\n\t\t\t\t\t\t\t\t\t\""type\"": 
\""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""messageSubject\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""networkMessageId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""originalClientIp\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""originalServerIp\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""recipientAddress\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""recipientCount\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""recipientStatus\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""relatedRecipientAddress\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""returnPath\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""senderAddress\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""senderHostname\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""serverIp\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""sourceContext\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""schemaVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": 
\""messageTrackingTenantId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""totalBytes\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""transportTrafficType\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""FilePath\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t]\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t'@\n3. Copy, Replace, Paste and execute the following parameters with your own values:\n\t\t$SubscriptionID = 'YourGUID'\n\t\t$ResourceGroupName = 'YourResourceGroupName'\n\t\t$WorkspaceName = 'YourWorkspaceName'\n4. Execute the Following Cmdlet to create the table:\n\t\tInvoke-AzRestMethod -Path \""/subscriptions/$SubscriptionID/resourcegroups/$ResourceGroupName/providers/microsoft.operationalinsights/workspaces/$WorkspaceName/tables/MessageTrackingLog_CL?api-version=2021-12-01-preview\"" -Method PUT -payload $tableParams""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create a DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click on 'Create' button.\n3. 
On 'Basics' tab, fill the Rule name like **DCR-Option6-MessageTrackingLogs**, select the 'Data Collection Endpoint' with the previously created endpoint and fill other parameters.\n4. In the **Resources** tab, add your Exchange Servers.\n5. In **Collect and Deliver**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n\t\tsource | extend d = split(RawData,',') | extend TimeGenerated =todatetime(d[0]) ,clientIP =tostring(d[1]) ,clientHostname =tostring(d[2]) ,serverIp=tostring(d[3]) ,senderHostname=tostring(d[4]) ,sourceContext=tostring(d[5]) ,connectorId =tostring(d[6]) ,source=tostring(d[7]) ,eventId =tostring(d[8]) ,internalMessageId =tostring(d[9]) ,messageId =tostring(d[10]) ,networkMessageId =tostring(d[11]) ,recipientAddress=tostring(d[12]) ,recipientStatus=tostring(d[13]) ,totalBytes=tostring(d[14]) ,recipientCount=tostring(d[15]) ,relatedRecipientAddress=tostring(d[16]) ,reference=tostring(d[17]) ,messageSubject =tostring(d[18]) ,senderAddress=tostring(d[19]) ,returnPath=tostring(d[20]) ,messageInfo =tostring(d[21]) ,directionality=tostring(d[22]) ,messageTrackingTenantId =tostring(d[23]) ,originalClientIp =tostring(d[24]) ,originalServerIp =tostring(d[25]) ,customData=tostring(d[26]) ,transportTrafficType =tostring(d[27]) ,logId =tostring(d[28]) ,schemaVersion=tostring(d[29]) | project-away d,RawData\n and click on 'Destination'.\n6. In 'Destination', add a destination and select the Workspace where you have previously created the Custom Table \n7. Click on 'Add data source'.\n8. 
Fill other required parameters and tags and create the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Log Analytics will be deprecated"", ""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt6ExchangeMessageTrackingLogs.json","true" +"ExchangeHttpProxy_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt7ExchangeHTTPProxyLogs","Microsoft","Microsoft Exchange HTTP Proxy Logs","[Option 7] - Using Azure Monitor Agent - You can stream HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you create custom alerts, and improve investigation. [Learn more](https://aka.ms/ESI_DataConnectorOptions)","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 7** of the wiki.""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. [Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template (Prefered Method)"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Create Custom Table - Explanation"", ""description"": ""The Custom Table can't be created using the Azure Portal. You need to use an ARM template, a PowerShell Script or another method [described here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/create-custom-table?tabs=azure-powershell-1%2Cazure-portal-2%2Cazure-portal-3#create-a-custom-table).""}, {""title"": ""Create Custom Table using an ARM Template"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-HTTPProxyCustomTable)\n2. Select the preferred **Subscription**, **Resource Group**, **Location** and **Analytic Workspace Name**. \n3. Click **Create** to deploy.""}, {""title"": ""Create Custom Table using PowerShell in Cloud Shell"", ""description"": ""1. From the Azure Portal, open a Cloud Shell.\n2. 
Copy and paste and Execute the following script in the Cloud Shell to create the table.\n\t\t$tableParams = @'\n\t\t{\n\t\t\t\""properties\"": {\n\t\t\t\t \""schema\"": {\n\t\t\t\t\t\t\""name\"": \""ExchangeHttpProxy_CL\"",\n\t\t\t\t\t\t\""columns\"": [\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""AccountForestLatencyBreakup\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ActivityContextLifeTime\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ADLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""AnchorMailbox\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""AuthenticatedUser\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""AuthenticationType\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""AuthModulePerfContext\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackEndCookie\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackEndGenericInfo\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackendProcessingLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackendReqInitLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackendReqStreamLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": 
\""BackendRespInitLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackendRespStreamLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackEndStatus\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BuildVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""CalculateTargetBackEndLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ClientIpAddress\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ClientReqStreamLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ClientRequestId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ClientRespStreamLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""CoreLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""DatabaseGuid\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""EdgeTraceId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ErrorCode\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""GenericErrors\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""GenericInfo\"",\n\t\t\t\t\t\t\t\t\t\""type\"": 
\""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""GlsLatencyBreakup\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""HandlerCompletionLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""HandlerToModuleSwitchingLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""HttpPipelineLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""HttpProxyOverhead\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""HttpStatus\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""IsAuthenticated\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""KerberosAuthHeaderLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""MajorVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""Method\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""MinorVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ModuleToHandlerSwitchingLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""Organization\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""PartitionEndpointLookupLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": 
\""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""Protocol\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ProtocolAction\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ProxyAction\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ProxyTime\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RequestBytes\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RequestHandlerLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RequestId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ResourceForestLatencyBreakup\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ResponseBytes\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RevisionVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RouteRefresherLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RoutingHint\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RoutingLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RoutingStatus\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": 
\""RoutingType\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ServerHostName\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ServerLocatorHost\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ServerLocatorLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""SharedCacheLatencyBreakup\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TargetOutstandingRequests\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TargetServer\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TargetServerVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TotalAccountForestLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TotalGlsLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TotalRequestTime\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TotalResourceForestLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TotalSharedCacheLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""UrlHost\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": 
\""UrlQuery\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""UrlStem\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""UserADObjectGuid\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""UserAgent\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TimeGenerated\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""datetime\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""FilePath\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t]\n\t\t\t\t }\n\t\t\t }\n\t\t }\n\t\t '@\n3. Copy, Replace, Paste and execute the following parameters with your own values:\n\t\t$SubscriptionID = 'YourGUID'\n\t\t$ResourceGroupName = 'YourResourceGroupName'\n\t\t$WorkspaceName = 'YourWorkspaceName'\n4. Execute the Following Cmdlet to create the table:\n\t\tInvoke-AzRestMethod -Path \""/subscriptions/$SubscriptionID/resourcegroups/$ResourceGroupName/providers/microsoft.operationalinsights/workspaces/$WorkspaceName/tables/ExchangeHttpProxy_CL?api-version=2021-12-01-preview\"" -Method PUT -payload $tableParams""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create a DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click on 'Create' button.\n3. On 'Basics' tab, fill the Rule name like **DCR-Option7-HTTPProxyLogs**, select the 'Data Collection Endpoint' with the previously created endpoint and fill other parameters.\n4. In the **Resources** tab, add your Exchange Servers.\n5. In **Collect and Deliver**, add a Data Source type 'Custom Text logs' and enter the following file pattern : \n\t\t'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log'\n6. Put 'ExchangeHttpProxy_CL' in Table Name.\n7. 
in Transform field, enter the following KQL request :\n\t\tsource | extend d = split(RawData,',') | extend DateTime=todatetime(d[0]),RequestId=tostring(d[1]) ,MajorVersion=tostring(d[2]) ,MinorVersion=tostring(d[3]) ,BuildVersion=tostring(d[4]) ,RevisionVersion=tostring(d[5]) ,ClientRequestId=tostring(d[6]) ,Protocol=tostring(d[7]) ,UrlHost=tostring(d[8]) ,UrlStem=tostring(d[9]) ,ProtocolAction=tostring(d[10]) ,AuthenticationType=tostring(d[11]) ,IsAuthenticated=tostring(d[12]) ,AuthenticatedUser=tostring(d[13]) ,Organization=tostring(d[14]) ,AnchorMailbox=tostring(d[15]) ,UserAgent=tostring(d[16]) ,ClientIpAddress=tostring(d[17]) ,ServerHostName=tostring(d[18]) ,HttpStatus=tostring(d[19]) ,BackEndStatus=tostring(d[20]) ,ErrorCode=tostring(d[21]) ,Method=tostring(d[22]) ,ProxyAction=tostring(d[23]) ,TargetServer=tostring(d[24]) ,TargetServerVersion=tostring(d[25]) ,RoutingType=tostring(d[26]) ,RoutingHint=tostring(d[27]) ,BackEndCookie=tostring(d[28]) ,ServerLocatorHost=tostring(d[29]) ,ServerLocatorLatency=tostring(d[30]) ,RequestBytes=tostring(d[31]) ,ResponseBytes=tostring(d[32]) ,TargetOutstandingRequests=tostring(d[33]) ,AuthModulePerfContext=tostring(d[34]) ,HttpPipelineLatency=tostring(d[35]) ,CalculateTargetBackEndLatency=tostring(d[36]) ,GlsLatencyBreakup=tostring(d[37]) ,TotalGlsLatency=tostring(d[38]) ,AccountForestLatencyBreakup=tostring(d[39]) ,TotalAccountForestLatency=tostring(d[40]) ,ResourceForestLatencyBreakup=tostring(d[41]) ,TotalResourceForestLatency=tostring(d[42]) ,ADLatency=tostring(d[43]) ,SharedCacheLatencyBreakup=tostring(d[44]) ,TotalSharedCacheLatency=tostring(d[45]) ,ActivityContextLifeTime=tostring(d[46]) ,ModuleToHandlerSwitchingLatency=tostring(d[47]) ,ClientReqStreamLatency=tostring(d[48]) ,BackendReqInitLatency=tostring(d[49]) ,BackendReqStreamLatency=tostring(d[50]) ,BackendProcessingLatency=tostring(d[51]) ,BackendRespInitLatency=tostring(d[52]) ,BackendRespStreamLatency=tostring(d[53]) ,ClientRespStreamLatency=tostring(d[54]) 
,KerberosAuthHeaderLatency=tostring(d[55]) ,HandlerCompletionLatency=tostring(d[56]) ,RequestHandlerLatency=tostring(d[57]) ,HandlerToModuleSwitchingLatency=tostring(d[58]) ,ProxyTime=tostring(d[59]) ,CoreLatency=tostring(d[60]) ,RoutingLatency=tostring(d[61]) ,HttpProxyOverhead=tostring(d[62]) ,TotalRequestTime=tostring(d[63]) ,RouteRefresherLatency=tostring(d[64]) ,UrlQuery=tostring(d[65]) ,BackEndGenericInfo=tostring(d[66]) ,GenericInfo=tostring(d[67]) ,GenericErrors=tostring(d[68]) ,EdgeTraceId=tostring(d[69]) ,DatabaseGuid=tostring(d[70]) ,UserADObjectGuid=tostring(d[71]) ,PartitionEndpointLookupLatency=tostring(d[72]) ,RoutingStatus=tostring(d[73]) | extend TimeGenerated = DateTime | project-away d,RawData,DateTime | project-away d,RawData,DateTime\n and click on 'Destination'.\n8. In 'Destination', add a destination and select the Workspace where you have previously created the Custom Table \n9. Click on 'Add data source'.\n10. Fill other required parameters and tags and create the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Log Analytics will be deprecated"", ""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt7ExchangeHTTPProxyLogs.json","true" +"ESIExchangeOnlineConfig_CL","Microsoft Exchange Security - Exchange Online","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20Online","microsoftsentinelcommunity","azure-sentinel-solution-esionline","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeOnlineCollector","Microsoft","Exchange Security Insights Online Collector","Connector used to push Exchange Online Security configuration for Microsoft Sentinel Analysis","[{""description"": "">**NOTE - UPDATE**"", ""instructions"": [{""parameters"": {""text"": ""

NOTE - UPDATE:

We recommend to Update the Collector to Version 7.6.0.0 or highier.
The Collector Script Update procedure could be found here : ESI Online Collector Update"", ""visible"": true, ""inline"": false}, ""type"": ""InfoMessage""}]}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Follow the steps for each Parser to create the Kusto Functions alias : [**ExchangeConfiguration**](https://aka.ms/sentinel-ESI-ExchangeConfiguration-Online-parser) and [**ExchangeEnvironmentList**](https://aka.ms/sentinel-ESI-ExchangeEnvironmentList-Online-parser) \n\n**STEP 1 - Parsers deployment**"", ""instructions"": [{""parameters"": {""title"": ""Parser deployment (When using Microsoft Exchange Security Solution, Parsers are automatically deployed)"", ""instructionSteps"": [{""title"": ""1. Download the Parser files"", ""description"": ""The latest version of the 2 files [**ExchangeConfiguration.yaml**](https://aka.ms/sentinel-ESI-ExchangeConfiguration-Online-parser) and [**ExchangeEnvironmentList.yaml**](https://aka.ms/sentinel-ESI-ExchangeEnvironmentList-Online-parser)""}, {""title"": ""2. Create Parser **ExchangeConfiguration** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeConfiguration** function"", ""description"": ""Click on save button.\n Define the parameters as asked on the header of the parser file.\nClick save again.""}, {""title"": ""4. Reproduce the same steps for Parser **ExchangeEnvironmentList**"", ""description"": ""Reproduce the step 2 and 3 with the content of 'ExchangeEnvironmentList.yaml' file""}]}, ""type"": ""InstructionStepsGroup""}]}, {""description"": "">**NOTE:** This connector uses Azure Automation to connect to 'Exchange Online' to pull its Security analysis into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Automation pricing page](https://azure.microsoft.com/pricing/details/automation/) for details.""}, {""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Automation**\n\n>**IMPORTANT:** Before deploying the 'ESI Exchange Online Security Configuration' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Exchange Online tenant name (contoso.onmicrosoft.com), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the 'ESI Exchange Online Security Configuration' connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-ExchangeCollector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Tenant Name**, 'and/or Other required fields'. \n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""description"": ""**Option 2 - Manual Deployment of Azure Automation**\n\n Use the following step-by-step instructions to deploy the 'ESI Exchange Online Security Configuration' connector manually with Azure Automation."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create the Azure Automation Account"", ""description"": ""1. From the Azure Portal, navigate to [Azure Automation Account](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.Automation%2FAutomationAccounts).\n2. 
Click **+ Add** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the Azure Automation. \n4. In the **Advanced** and **Networking** and **Tags** Tabs, leave fields as default if you don't need to customize them.\n5. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Add Exchange Online Management Module, Microsoft Graph (Authentication, User and Group) Modules"", ""description"": ""1. On the Automation Account page, select **Modules**.\n2. Click on **Browse gallery** and search the **ExchangeOnlineManagement** module.\n3. Select it and click on **Select**.\n4. Choose Version **5.1** on Runtime version field and click on Import button.\nRepeat the step for the following modules : 'Microsoft.Graph.Authentication', 'Microsoft.Graph.Users' and 'Microsoft.Graph.Groups. **Attention, you need to wait for Microsoft.Graph.Authentication installation before processing next modules**""}, {""title"": ""C. Download the Runbook Content"", ""description"": ""1. Download the latest version of ESI Collector. The latest version can be found here : https://aka.ms/ESI-ExchangeCollector-Script\n2. Unzip the file to find the JSON file and the PS1 file for next step.\n""}, {""title"": ""D. Create Runbook"", ""description"": ""1. On the Automation Account page, select the **Runbooks** button.\n2. Click on **Create a runbook** and name it like 'ESI-Collector' with a runbook type **PowerShell**, Runtime Version **5.1** and click 'Create'.\n2. Import the content of the previous step's PS1 file in the Runbook window.\n3. Click on **Publish**""}, {""title"": ""E. Create GlobalConfiguration Variable"", ""description"": ""1. On the Automation Account page, select the **Variables** button.\n2. Click on **Add a Variable** and name it exaclty 'GlobalConfiguration' with a type **String**.\n2. On 'Value' field, copy the content of the previous step's JSON file.\n3. 
Inside the content, replace the values of **WorkspaceID** and **WorkspaceKey**.\n4. Click on 'Create' button.""}, {""title"": ""F. Create TenantName Variable"", ""description"": ""1. On the Automation Account page, select the **Variables** button.\n2. Click on **Add a Variable** and name it exaclty 'TenantName' with a type **String**.\n3. On 'Value' field, write the tenant name of your Exchange Online.\n4. Click on 'Create' button.""}, {""title"": ""G. Create LastDateTracking Variable"", ""description"": ""1. On the Automation Account page, select the **Variables** button.\n2. Click on **Add a Variable** and name it exaclty 'LastDateTracking' with a type **String**.\n3. On 'Value' field, write 'Never'.\n4. Click on 'Create' button.""}, {""title"": ""H. Create a Runbook Schedule"", ""description"": ""1. On the Automation Account page, select the **Runbook** button and click on your created runbook.\n2. Click on **Schedules** and **Add a schedule** button.\n3. Click on **Schedule**, **Add a Schedule** and name it. Select **Recurring** value with a reccurence of every 1 day, click 'Create'.\n4. Click on 'Configure parameters and run settings'. Leave all empty and click on **OK** and **OK** again.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""description"": ""**STEP 3 - Assign Microsoft Graph Permission and Exchange Online Permission to Managed Identity Account** \n\nTo be able to collect Exchange Online information and to be able to retrieve User information and memberlist of admin groups, the automation account need multiple permission."", ""instructions"": [{""parameters"": {""title"": ""Assign Permissions by Script"", ""instructionSteps"": [{""title"": ""A. Download Permission Script"", ""description"": ""[Permission Update script](https://aka.ms/ESI-ExchangeCollector-Permissions)""}, {""title"": ""B. Retrieve the Azure Automation Managed Identity GUID and insert it in the downloaded script"", ""description"": ""1. 
Go to your Automation Account, in the **Identity** Section. You can find the Guid of your Managed Identity.\n2. Replace the GUID in $MI_ID = \""XXXXXXXXXXX\"" with the GUID of your Managed Identity.""}, {""title"": ""C. Launch the script with a **Global-Administrator** account"", ""description"": ""**Attention this script requires MSGraph Modules and Admin Consent to access to your tenant with Microsoft Graph**.\n\tThe script will add 3 permissions to the Managed identity:\n\t1. Exchange Online ManageAsApp permission\n\t2. User.Read.All on Microsoft Graph API\n\t3. Group.Read.All on Microsoft Graph API""}, {""title"": ""D. Exchange Online Role Assignment"", ""description"": ""1. As a **Global Administrator**, go to **Roles and Administrators**.\n2. Select **Global Reader** role or **Security Reader** and click to 'Add assignments'.\n3. Click on 'No member selected' and search your Managed Identity account Name beginning by **the name of your automation account** like 'ESI-Collector'. Select it and click on 'Select'.\n4. Click **Next** and validate the assignment by clicking **Assign**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""microsoft.automation/automationaccounts permissions"", ""description"": ""Read and write permissions to create an Azure Automation with a Runbook is required. [See the documentation to learn more about Automation Account](https://learn.microsoft.com/en-us/azure/automation/overview).""}, {""name"": ""Microsoft.Graph permissions"", ""description"": ""Groups.Read, Users.Read and Auditing.Read permissions are required to retrieve user/group information linked to Exchange Online assignments. 
[See the documentation to learn more](https://aka.ms/sentinel-ESI-OnlineCollectorPermissions).""}, {""name"": ""Exchange Online permissions"", ""description"": ""Exchange.ManageAsApp permission and **Global Reader** or **Security Reader** Role are needed to retrieve the Exchange Online Security Configuration.[See the documentation to learn more](https://aka.ms/sentinel-ESI-OnlineCollectorPermissions).""}, {""name"": ""(Optional) Log Storage permissions"", ""description"": ""Storage Blob Data Contributor to a storage account linked to the Automation Account Managed identity or an Application ID is mandatory to store logs.[See the documentation to learn more](https://aka.ms/sentinel-ESI-OnlineCollectorPermissions).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20Online/Data%20Connectors/ESI-ExchangeOnlineCollector.json","true" +"PowerBIActivity","Microsoft PowerBI","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20PowerBI","azuresentinel","azure-sentinel-solution-microsoftpowerbi","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OfficePowerBI","Microsoft","Microsoft PowerBI","Microsoft PowerBI is a collection of software services, apps, and connectors that work together to turn your unrelated sources of data into coherent, visually immersive, and interactive insights. Your data may be an Excel spreadsheet, a collection of cloud-based and on-premises hybrid data warehouses, or a data store of some other type. This connector lets you stream PowerBI audit logs into Microsoft Sentinel, allowing you to track user activities in your PowerBI environment. You can filter the audit data by date range, user, dashboard, report, dataset, and activity type.","[{""title"": ""Connect Microsoft PowerBI audit logs to Microsoft Sentinel"", ""description"": ""This connector uses the Office Management API to get your PowerBI audit logs. 
The logs will be stored and processed in your existing Microsoft Sentinel workspace. You can find the data in the **PowerBIActivity** table."", ""instructions"": [{""parameters"": {""connectorKind"": ""OfficePowerBI"", ""title"": ""Microsoft PowerBI"", ""enable"": true}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""customs"": [{""name"": ""License"", ""description"": ""Microsoft Power BI eligible license is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20PowerBI/Data%20Connectors/template_OfficePowerBI.json","true" +"","Microsoft Project","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Project","azuresentinel","azure-sentinel-solution-microsoftproject","2022-05-23","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"PurviewDataSensitivityLogs","Microsoft Purview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview","azuresentinel","azure-sentinel-solution-azurepurview","2021-11-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftAzurePurview","Microsoft","Microsoft Purview","Connect to Microsoft Purview to enable data sensitivity enrichment of Microsoft Sentinel. Data classification and sensitivity label logs from Microsoft Purview scans can be ingested and visualized through workbooks, analytical rules, and more. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2224125&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Purview to Microsoft Sentinel"", ""description"": ""Within the Azure Portal, navigate to your Purview resource:\n 1. In the search bar, search for **Purview accounts.**\n 2. Select the specific account that you would like to be set up with Sentinel.\n\nInside your Microsoft Purview resource:\n 3. Select **Diagnostic Settings.**\n 4. Select **+ Add diagnostic setting.**\n 5. In the **Diagnostic setting** blade:\n - Select the Log Category as **DataSensitivityLogEvent**.\n - Select **Send to Log Analytics**.\n - Chose the log destination workspace. This should be the same workspace that is used by **Microsoft Sentinel.**\n - Click **Save**."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Microsoft Purview account Owner or Contributor role to set up Diagnostic Settings. 
Microsoft Contributor role with write permissions to enable data connector, view workbook, and create analytic rules."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview/Data%20Connectors/MicrosoftPurview.json","true" +"MicrosoftPurviewInformationProtection","Microsoft Purview Information Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview%20Information%20Protection","azuresentinel","azure-sentinel-solution-mip","2023-01-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftPurviewInformationProtection","Microsoft","Microsoft Purview Information Protection","Microsoft Purview Information Protection helps you discover, classify, protect, and govern sensitive information wherever it lives or travels. Using these capabilities enable you to know your data, identify items that are sensitive and gain visibility into how they are being used to better protect your data. Sensitivity labels are the foundational capability that provide protection actions, applying encryption, access restrictions and visual markings.
Integrate Microsoft Purview Information Protection logs with Microsoft Sentinel to view dashboards, create custom alerts and improve investigation. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223811&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Purview Information Protection audit logs to Microsoft Sentinel"", ""instructions"": [{""parameters"": {""connectorKind"": ""MicrosoftPurviewInformationProtection"", ""title"": """", ""enable"": true}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""customs"": [{""name"": ""License"", ""description"": ""Enterprise Mobility + Security E5/A5 or Microsoft 365 E5/A5 or P2""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview%20Information%20Protection/Data%20Connectors/MicrosoftPurviewInformationProtection.json","true" +"Syslog","Microsoft Sysmon For Linux","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Sysmon%20For%20Linux","azuresentinel","azure-sentinel-solution-sysmonforlinux","2021-10-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftSysmonForLinux","Microsoft","[Deprecated] Microsoft Sysmon For Linux","[Sysmon for Linux](https://github.com/Sysinternals/SysmonForLinux) provides detailed information about process creations, network connections and other system events.
[Sysmon for linux link:]. The Sysmon for Linux connector uses [Syslog](https://aka.ms/sysLogInfo) as its data ingestion method. This solution depends on ASIM to work as expected. [Deploy ASIM](https://aka.ms/DeployASIM) to get the full value from the solution.","[{""title"": """", ""description"": "">This data connector depends on ASIM parsers based on a Kusto Functions to work as expected. [Deploy the parsers](https://aka.ms/ASimSysmonForLinuxARM) \n\n The following functions will be deployed:\n\n - vimFileEventLinuxSysmonFileCreated, vimFileEventLinuxSysmonFileDeleted\n\n - vimProcessCreateLinuxSysmon, vimProcessTerminateLinuxSysmon\n\n - vimNetworkSessionLinuxSysmon \n\n[Read more](https://aka.ms/AboutASIM)"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. 
Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Sysmon%20For%20Linux/Data%20Connectors/SysmonForLinux.json","true" +"vimProcessCreateLinuxSysmon","Microsoft Sysmon For Linux","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Sysmon%20For%20Linux","azuresentinel","azure-sentinel-solution-sysmonforlinux","2021-10-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftSysmonForLinux","Microsoft","[Deprecated] Microsoft Sysmon For Linux","[Sysmon for Linux](https://github.com/Sysinternals/SysmonForLinux) provides detailed information about process creations, network connections and other system events.
[Sysmon for linux link:]. The Sysmon for Linux connector uses [Syslog](https://aka.ms/sysLogInfo) as its data ingestion method. This solution depends on ASIM to work as expected. [Deploy ASIM](https://aka.ms/DeployASIM) to get the full value from the solution.","[{""title"": """", ""description"": "">This data connector depends on ASIM parsers based on a Kusto Functions to work as expected. [Deploy the parsers](https://aka.ms/ASimSysmonForLinuxARM) \n\n The following functions will be deployed:\n\n - vimFileEventLinuxSysmonFileCreated, vimFileEventLinuxSysmonFileDeleted\n\n - vimProcessCreateLinuxSysmon, vimProcessTerminateLinuxSysmon\n\n - vimNetworkSessionLinuxSysmon \n\n[Read more](https://aka.ms/AboutASIM)"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. 
Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Sysmon%20For%20Linux/Data%20Connectors/SysmonForLinux.json","true" +"","Microsoft Windows SQL Server Database Audit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Windows%20SQL%20Server%20Database%20Audit","microsoftsentinelcommunity","azure-sentinel-solution-sqlserverdatabaseaudit","2022-11-29","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","","","false" +"","MicrosoftDefenderForEndpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftDefenderForEndpoint","azuresentinel","azure-sentinel-solution-microsoftdefenderendpoint","2022-01-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","MicrosoftPurviewInsiderRiskManagement","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftPurviewInsiderRiskManagement","azuresentinel","azure-sentinel-solution-insiderriskmanagement","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" 
+"Awareness_Performance_Details_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret\n5. Entra Object ID""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Awareness Training Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastAwarenessPerformanceDetailsTableName - Enter name of the table used to store Awareness Performance Details data. Default is 'Awareness_Performance_Details'\n\n\t k. MimecastAwarenessUserDataTableName - Enter name of the table used to store Awareness User Data data. 
Default is 'Awareness_User_Data'\n\n\t l. MimecastAwarenessWatchlistDetailsTableName - Enter name of the table used to store Awareness Watchlist Details data. Default is 'Awareness_Watchlist_Details'\n\n\t m. MimecastAwarenessSafeScoreDetailsTableName - Enter name of the table used to store Awareness SafeScore Details data. Default is 'Awareness_SafeScore_Details'\n\n\t n. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t o. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t p. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t q. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" +"Awareness_SafeScore_Details_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret\n5. Entra Object ID""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Awareness Training Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastAwarenessPerformanceDetailsTableName - Enter name of the table used to store Awareness Performance Details data. Default is 'Awareness_Performance_Details'\n\n\t k. MimecastAwarenessUserDataTableName - Enter name of the table used to store Awareness User Data data. 
Default is 'Awareness_User_Data'\n\n\t l. MimecastAwarenessWatchlistDetailsTableName - Enter name of the table used to store Awareness Watchlist Details data. Default is 'Awareness_Watchlist_Details'\n\n\t m. MimecastAwarenessSafeScoreDetailsTableName - Enter name of the table used to store Awareness SafeScore Details data. Default is 'Awareness_SafeScore_Details'\n\n\t n. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t o. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t p. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t q. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" +"Awareness_User_Data_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret\n5. Entra Object ID""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Awareness Training Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastAwarenessPerformanceDetailsTableName - Enter name of the table used to store Awareness Performance Details data. Default is 'Awareness_Performance_Details'\n\n\t k. MimecastAwarenessUserDataTableName - Enter name of the table used to store Awareness User Data data. 
Default is 'Awareness_User_Data'\n\n\t l. MimecastAwarenessWatchlistDetailsTableName - Enter name of the table used to store Awareness Watchlist Details data. Default is 'Awareness_Watchlist_Details'\n\n\t m. MimecastAwarenessSafeScoreDetailsTableName - Enter name of the table used to store Awareness SafeScore Details data. Default is 'Awareness_SafeScore_Details'\n\n\t n. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t o. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t p. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t q. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" +"Awareness_Watchlist_Details_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret\n5. Entra Object ID""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Awareness Training Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastAwarenessPerformanceDetailsTableName - Enter name of the table used to store Awareness Performance Details data. Default is 'Awareness_Performance_Details'\n\n\t k. MimecastAwarenessUserDataTableName - Enter name of the table used to store Awareness User Data data. 
Default is 'Awareness_User_Data'\n\n\t l. MimecastAwarenessWatchlistDetailsTableName - Enter name of the table used to store Awareness Watchlist Details data. Default is 'Awareness_Watchlist_Details'\n\n\t m. MimecastAwarenessSafeScoreDetailsTableName - Enter name of the table used to store Awareness SafeScore Details data. Default is 'Awareness_SafeScore_Details'\n\n\t n. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t o. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t p. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t q. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" +"Audit_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastAuditAPI","Mimecast","Mimecast Audit","The data connector for [Mimecast Audit](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to audit and authentication events within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into user activity, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
Audit
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": """", ""description"": ""**STEP 3 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. 
You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 4 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 5 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": ""Deploy the Mimecast Audit Data Connector:"", ""description"": ""Use this method for automated deployment of the Mimecast Audit Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAuditAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAuditAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastAuditTableName - Enter name of the table used to store Audit data. Default is 'Audit'\n\n\t k. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t l. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t m. 
LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t n. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAudit/Mimecast_Audit_FunctionApp.json","true" +"Cloud_Integrated_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastCIAPI","Mimecast","Mimecast Cloud Integrated","The data connector for [Mimecast Cloud Integrated](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Cloud Integrated inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": """", ""description"": ""**STEP 3 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 4 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 5 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Cloud Integrated Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastCI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastCI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastCITableName - Enter name of the table used to store Cloud Integrated data. Default is 'Cloud_Integrated'\n\n\t k. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t l. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t m. 
LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t n. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastCloudIntegrated/Mimecast_Cloud_Integrated_FunctionApp.json","true" +"Seg_Cg_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSEGAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Cloud Gateway
- Mimecast Data Leak Prevention
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": """", ""description"": ""**STEP 3 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. 
You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 4 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 5 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": ""Deploy the Mimecast Secure Email Gateway Data Connector:"", ""description"": ""Use this method for automated deployment of the Mimecast Secure Email Gateway Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastSEGAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastSEGAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastCGTableName - Enter name of the table used to store CG data. Default is 'Seg_Cg'\n\n\t k. MimecastDLPTableName - Enter name of the table used to store DLP data. Default is 'Seg_Dlp'\n\n\t l. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t m. Schedule - Please enter a valid Quartz cron-expression. 
(Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t n. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t o. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastSEG/Mimecast_SEG_FunctionApp.json","true" +"Seg_Dlp_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSEGAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Cloud Gateway
- Mimecast Data Leak Prevention
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": """", ""description"": ""**STEP 3 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. 
You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 4 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 5 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": ""Deploy the Mimecast Secure Email Gateway Data Connector:"", ""description"": ""Use this method for automated deployment of the Mimecast Secure Email Gateway Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastSEGAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastSEGAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastCGTableName - Enter name of the table used to store CG data. Default is 'Seg_Cg'\n\n\t k. MimecastDLPTableName - Enter name of the table used to store DLP data. Default is 'Seg_Dlp'\n\n\t l. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t m. Schedule - Please enter a valid Quartz cron-expression. 
(Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t n. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t o. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastSEG/Mimecast_SEG_FunctionApp.json","true" +"Ttp_Attachment_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Targeted Threat Protection Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t k. 
MimecastTTPAttachmentTableName - Enter name of the table used to store TTP Attachment data. Default is 'Ttp_Attachment'\n\n\t l. MimecastTTPImpersonationTableName - Enter name of the table used to store TTP Impersonation data. Default is 'Ttp_Impersonation'\n\n\t m. MimecastTTPUrlTableName - Enter name of the table used to store TTP Url data. Default is 'Ttp_Url'\n\n\t n. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t o. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t p. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastTTP/Mimecast_TTP_FunctionApp.json","true" +"Ttp_Impersonation_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Targeted Threat Protection Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t k. 
MimecastTTPAttachmentTableName - Enter name of the table used to store TTP Attachment data. Default is 'Ttp_Attachment'\n\n\t l. MimecastTTPImpersonationTableName - Enter name of the table used to store TTP Impersonation data. Default is 'Ttp_Impersonation'\n\n\t m. MimecastTTPUrlTableName - Enter name of the table used to store TTP Url data. Default is 'Ttp_Url'\n\n\t n. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t o. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t p. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastTTP/Mimecast_TTP_FunctionApp.json","true" +"Ttp_Url_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Targeted Threat Protection Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t k. 
MimecastTTPAttachmentTableName - Enter name of the table used to store TTP Attachment data. Default is 'Ttp_Attachment'\n\n\t l. MimecastTTPImpersonationTableName - Enter name of the table used to store TTP Impersonation data. Default is 'Ttp_Impersonation'\n\n\t m. MimecastTTPUrlTableName - Enter name of the table used to store TTP Url data. Default is 'Ttp_Url'\n\n\t n. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t o. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t p. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastTTP/Mimecast_TTP_FunctionApp.json","true" +"MimecastAudit_CL","MimecastAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastAudit","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastaudit","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastAuditAPI","Mimecast","Mimecast Audit & Authentication","The data connector for [Mimecast Audit & Authentication](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to audit and authentication events within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into user activity, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
Audit & Authentication
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Audit & Authentication Data Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAudit-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy. \n\n6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> Audit checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt and select it for upload (this is done so that date_range for SIEM logs is stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mimecast API credentials"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}, {""name"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""name"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. 
Client Secret""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastAudit/Data%20Connectors/MimecastAudit_API_AzureFunctionApp.json","true" +"MimecastDLP_CL","MimecastSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastseg","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSIEMAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Data Leak Prevention
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Secure Email Gateway Data Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastSEG-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> SIEM checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt, dlp-checkpoint.txt and select it for upload (this is done so that date_range for SIEM logs is stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mimecast API credentials"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}, {""name"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""name"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. 
Client Secret""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG/Data%20Connectors/MimecastSEG_API_AzureFunctionApp.json","true" +"MimecastSIEM_CL","MimecastSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastseg","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSIEMAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Data Leak Prevention
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Secure Email Gateway Data Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastSEG-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> SIEM checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt, dlp-checkpoint.txt and select it for upload (this is done so that date_range for SIEM logs is stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mimecast API credentials"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}, {""name"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""name"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. 
Client Secret""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG/Data%20Connectors/MimecastSEG_API_AzureFunctionApp.json","true" +"Event","MimecastTIRegional","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecasttiregional","2023-08-23","2023-09-11","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTIRegionalConnectorAzureFunctions","Mimecast","Mimecast Intelligence for Microsoft - Microsoft Sentinel","The data connector for Mimecast Intelligence for Microsoft provides regional threat intelligence curated from Mimecast’s email inspection technologies with pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times.
Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Threat Intelligence
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Enable Mimecast Intelligence for Microsoft - Microsoft Sentinel Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTIRegional-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TIR checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt and select it for upload (this is done so that date_range for TIR logs is stored in consistent state)\n""}, {""title"": ""Additional configuration:"", ""description"": "">Connect to a **Threat Intelligence Platforms** Data Connector. Follow instructions on the connector page and then click connect button.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mimecast API credentials"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}, {""name"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""name"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. 
Client Secret""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional/Data%20Connectors/MimecastTIRegional_API_AzureFunctionApp.json","true" +"ThreatIntelligenceIndicator","MimecastTIRegional","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecasttiregional","2023-08-23","2023-09-11","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTIRegionalConnectorAzureFunctions","Mimecast","Mimecast Intelligence for Microsoft - Microsoft Sentinel","The data connector for Mimecast Intelligence for Microsoft provides regional threat intelligence curated from Mimecast’s email inspection technologies with pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times.
Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Threat Intelligence
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Enable Mimecast Intelligence for Microsoft - Microsoft Sentinel Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTIRegional-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TIR checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt and select it for upload (this is done so that date_range for TIR logs is stored in consistent state)\n""}, {""title"": ""Additional configuration:"", ""description"": "">Connect to a **Threat Intelligence Platforms** Data Connector. Follow instructions on the connector page and then click connect button.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mimecast API credentials"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}, {""name"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""name"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. 
Client Secret""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional/Data%20Connectors/MimecastTIRegional_API_AzureFunctionApp.json","true" +"MimecastTTPAttachment_CL","MimecastTTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastttp","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Targeted Threat Protection Data 
Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTP-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TTP checkpoints ---> Upload*** and create empty files on your machine named attachment-checkpoint.txt, impersonation-checkpoint.txt, url-checkpoint.txt and select them for upload (this is done so that date_range for TTP logs are stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP/Data%20Connectors/MimecastTTP_API_FunctionApp.json","true" +"MimecastTTPImpersonation_CL","MimecastTTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastttp","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. 
The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Targeted Threat Protection Data 
Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTP-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TTP checkpoints ---> Upload*** and create empty files on your machine named attachment-checkpoint.txt, impersonation-checkpoint.txt, url-checkpoint.txt and select them for upload (this is done so that date_range for TTP logs are stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP/Data%20Connectors/MimecastTTP_API_FunctionApp.json","true" +"MimecastTTPUrl_CL","MimecastTTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastttp","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. 
The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Targeted Threat Protection Data 
Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTP-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TTP checkpoints ---> Upload*** and create empty files on your machine named attachment-checkpoint.txt, impersonation-checkpoint.txt, url-checkpoint.txt and select them for upload (this is done so that date_range for TTP logs are stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP/Data%20Connectors/MimecastTTP_API_FunctionApp.json","true" +"","Minemeld","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Minemeld","azuresentinel","azure-sentinel-solution-minemeld","2022-10-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"MDBALogTable_CL","MongoDBAtlas","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAtlas","mongodb","azure-sentinel-solution-mongodbatlas","2025-08-22","","","MongoDB","Partner","https://www.mongodb.com/company/contact","","domains","MongoDBAtlasLogsAzureFunctions","MongoDB","MongoDB Atlas Logs","The [MongoDBAtlas](https://www.mongodb.com/products/platform/atlas-database) 
Logs connector gives the capability to upload MongoDB Atlas database logs into Microsoft Sentinel through the MongoDB Atlas Administration API. Refer to the [API documentation](https://www.mongodb.com/docs/api/doc/atlas-admin-api-v2/) for more information. The connector provides the ability to get a range of database log messages for the specified hosts and specified project.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to 'MongoDB Atlas' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">Ensure the workspace is added to Microsoft Sentinel before deploying the connector.""}, {""title"": ""STEP 1 - Configuration steps for the 'MongoDB Atlas Administration API'"", ""description"": ""1. [Follow these instructions](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-an-organization) to create a MongoDB Atlas service account.\n2. Copy the **Client ID** and **Client Secret** you created, also the **Group ID** (Project) and each **Cluster ID** (Hostname) required for later steps.\n3. Refer [MongoDB Atlas API documentation](https://www.mongodb.com/docs/api/doc/atlas-admin-api-v2/operation/operation-downloadgroupclusterlog) for more details.\n4. The client secret can be passed into the connector via an Azure key vault or directly into the connector.\n5. 
If you want to use the key vault option create a key vault, using a Vault Access Policy, with a secret named **mongodb-client-secret** and your client secret saved as the secret value.""}, {""title"": ""STEP 2 - Deploy the 'MongoDB Atlas Logs' connector and the associated Azure Function"", ""description"": ""\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#view/Microsoft_Azure_CreateUIDef/CustomDeploymentBlade/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FMongoDBAtlas%2FData%20Connectors%2FMongoDBAtlasLogs%2Fazuredeploy_Connector_MongoDBAtlasLogs_AzureFunction.json/uiFormDefinitionUri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FMongoDBAtlas%2FData%20Connectors%2FMongoDBAtlasLogs%2FcreateUiDef.json)""}, {""title"": ""STEP 3 - Set the connector parameters"", ""description"": ""1. Select the preferred **Subscription** and an existing **Resource Group**.\n2. Enter an existing **Log Analytics Workspace Resource ID** belonging to the resource group.\n3. Click **Next**\n4. Enter the **MongoDB Group ID**, a list of up to 10 **MongoDB Cluster IDs**, each on a separate line, and **MongoDB Client ID**.\n5. Choose for **Authentication Method** either **Client Secret** and copy in your client secret value or **Key Vault** and copy in the name of your key vault. \nClick **Next** \n6. Review the MongoDB filters. Select logs from at least one category. Click **Next** \n7. Review the schedule. Click **Next** \n8. 
Review the settings then click **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""MongoDB Atlas service account **Client ID** and **Client Secret** are required. 
[See the documentation to learn more about creating a service account](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-an-organization)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAtlas/Data%20Connectors/MongoDBAtlasLogs/MongoDBAtlasLogs_AzureFunction.json","true" +"MongoDBAudit_CL","MongoDBAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAudit","azuresentinel","azure-sentinel-solution-mongodbaudit","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MongoDB","MongoDB","[Deprecated] MongoDB Audit","MongoDB data connector provides the capability to ingest [MongoDBAudit](https://www.mongodb.com/) into Microsoft Sentinel. Refer to [MongoDB documentation](https://www.mongodb.com/docs/manual/tutorial/getting-started/) for more information.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias MongoDBAudit and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAudit/Parsers/MongoDBAudit.txt) on the second line of the query, enter the hostname(s) of your MongoDBAudit device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Tomcat Server where the logs are generated.\n\n> Logs from MongoDB Enterprise Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure MongoDBAudit to write logs to files"", ""description"": ""Edit mongod.conf file (for Linux) or mongod.cfg (for Windows) to write logs to files:\n\n>**dbPath**: data/db\n\n>**path**: data/db/auditLog.json\n\nSet the following parameters: **dbPath** and **path**. 
Refer to the [MongoDB documentation for more details](https://www.mongodb.com/docs/manual/tutorial/configure-auditing/)""}, {""title"": ""3. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Settings**, select **Custom Logs** and click **+Add custom log**\n3. Click **Browse** to upload a sample of a MongoDBAudit log file. Then, click **Next >**\n4. Select **Timestamp** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to MongoDBAudit logs based on your configuration \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **MongoDBAudit** as the custom log Name (the '_CL' suffix will be added automatically) and click **Done**.""}, {""title"": ""Validate connectivity"", ""description"": ""It may take upwards of 20 minutes until your logs start to appear in Microsoft Sentinel.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAudit/Data%20Connectors/Connector_MongoDBAudit.json","true" +"MorphisecAlerts_CL","Morphisec","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Morphisec","morphisec","morphisec_utpp_mss","2022-05-05","","","Morphisec","Partner","https://support.morphisec.com/support/home","","domains","MorphisecCCF","Morphisec","Morphisec API Data Connector (via Codeless Connector Framework)","The [Morphisec](https://www.morphisec.com/) solution for Microsoft Sentinel enables you to seamlessly ingest security alerts directly from the Morphisec API. By leveraging Morphisec's proactive breach prevention and moving target defense capabilities, this integration enriches your security operations with high-fidelity, low-noise alerts on evasive threats.
This solution provides more than just data ingestion; it equips your security team with a full suite of ready-to-use content, including: Data Connector, ASIM Parser, Analytic Rule Templates and Workbook.
With this solution, you can empower your SOC to leverage Morphisec's powerful threat prevention within a unified investigation and response workflow in Microsoft Sentinel.","[{""title"": ""Configure Morphisec Connector"", ""description"": ""1. Create an API key client in Morphisec Console with read permissions to fetch alerts. \n2. Provide the Client ID and Client Secret in the connector configuration."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Morphisec Base URL"", ""placeholder"": ""https://.morphisec.cloud"", ""type"": ""text"", ""name"": ""baseUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Enter the Client ID"", ""type"": ""text"", ""name"": ""clientId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Enter the Client Secret"", ""type"": ""password"", ""name"": ""secret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Tenant ID"", ""placeholder"": ""Enter your Morphisec Tenant ID"", ""type"": ""text"", ""name"": ""tenantId""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect to Morphisec"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Morphisec/Data%20Connectors/Morphisec_CCF/Morphisec_ConnectorDefinition.json","true" +"MuleSoft_Cloudhub_CL","Mulesoft","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mulesoft","azuresentinel","azure-sentinel-solution-mulesoft","2022-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MuleSoft","MuleSoft","MuleSoft Cloudhub","The [MuleSoft 
Cloudhub](https://www.mulesoft.com/platform/saas/cloudhub-ipaas-cloud-based-integration) data connector provides the capability to retrieve logs from Cloudhub applications using the Cloudhub API and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**MuleSoftCloudhub**](https://aka.ms/sentinel-MuleSoftCloudhub-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**Note: This data connector fetch only the logs of the CloudHub application using Platform API and not of CloudHub 2.0 application**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the MuleSoft Cloudhub API**\n\n Follow the instructions to obtain the credentials.\n\n1. 
Obtain the **MuleSoftEnvId**, **MuleSoftAppName**, **MuleSoftUsername** and **MuleSoftPassword** using the [documentation](https://help.mulesoft.com/s/article/How-to-get-Cloudhub-application-information-using-Anypoint-Platform-API).\n2. Save credentials for using in the data connector.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the MuleSoft Cloudhub data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the MuleSoft Cloudhub data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MuleSoftCloudhubAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **MuleSoftEnvId**, **MuleSoftAppName**, **MuleSoftUsername** and **MuleSoftPassword** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": """", ""description"": ""**Option 2 - Manual Deployment of Azure Functions**\n\n Use the following step-by-step instructions to deploy the MuleSoft Cloudhub data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-MuleSoftCloudhubAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. MuleSoftXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tMuleSoftEnvId\n\t\tMuleSoftAppName\n\t\tMuleSoftUsername\n\t\tMuleSoftPassword\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**MuleSoftEnvId**, **MuleSoftAppName**, **MuleSoftUsername** and **MuleSoftPassword** are required for making API calls.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mulesoft/Data%20Connectors/MuleSoft_Cloudhub_API_FunctionApp.json","true" +"","Multi Cloud Attack Coverage Essentials - Resource Abuse","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Multi%20Cloud%20Attack%20Coverage%20Essentials%20-%20Resource%20Abuse","azuresentinel","azure-sentinel-solution-multicloudattackcoverage","2023-11-22","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","NCSC-NL NDN Cyber Threat Intelligence Sharing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NCSC-NL%20NDN%20Cyber%20Threat%20Intelligence%20Sharing","azuresentinel","azure-sentinel-solution-ncscnlndncti","2025-05-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"NGINX_CL","NGINX HTTP Server","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NGINX%20HTTP%20Server","azuresentinel","azure-sentinel-solution-nginx","2021-12-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NGINXHTTPServer","Nginx","[Deprecated] NGINX HTTP Server","The NGINX HTTP Server data connector provides the capability to ingest [NGINX](https://nginx.org/en/) HTTP Server events into Microsoft Sentinel. Refer to [NGINX Logs documentation](https://nginx.org/en/docs/http/ngx_http_log_module.html) for more information.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias NGINXHTTPServer and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NGINX%20HTTP%20Server/Parsers/NGINXHTTPServer.txt).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the NGINX HTTP Server where the logs are generated.\n\n> Logs from NGINX HTTP Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": 
{""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Data**, select **Custom Logs** and click **Add+**\n3. Click **Browse** to upload a sample of a NGINX HTTP Server log file (e.g. access.log or error.log). Then, click **Next >**\n4. Select **New line** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to NGINX HTTP logs based on your configuration. Example: \n - **Linux** Directory: '/var/log/nginx/*.log' \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **NGINX_CL** as the custom log Name and click **Done**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NGINX%20HTTP%20Server/Data%20Connectors/Connector_NGINX_agent.json","true" +"","NISTSP80053","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NISTSP80053","azuresentinel","azure-sentinel-solution-nistsp80053","2022-02-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"BSMmacOS_CL","NXLog BSM macOS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20BSM%20macOS","nxlogltd1589381969261","nxlog_bsm_macos_mss","2022-05-02","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogBSMmacOS","NXLog","NXLog BSM macOS","The [NXLog BSM](https://docs.nxlog.co/refman/current/im/bsm.html) macOS data connector uses Sun's Basic Security Module (BSM) Auditing API to read events directly from the kernel for capturing audit events on the macOS platform. 
This REST API connector can efficiently export macOS audit events to Microsoft Sentinel in real-time.","[{""title"": """", ""description"": ""Follow the step-by-step instructions in the *NXLog User Guide* Integration Topic [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20BSM%20macOS/Data%20Connectors/NXLogBSMmacOS.json","true" +"NXLogFIM_CL","NXLog FIM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20FIM","nxlogltd1589381969261","nxlog_fim","2022-08-15","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogFIM","NXLog","NXLog FIM","The [NXLog FIM](https://docs.nxlog.co/refman/current/im/fim.html) module allows for the scanning of files and directories, reporting detected additions, changes, renames and deletions on the designated paths through calculated checksums during successive scans. 
This REST API connector can efficiently export the configured FIM events to Microsoft Sentinel in real time.","[{""title"": """", ""description"": ""Follow the step-by-step instructions in the [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) integration chapter of the *NXLog User Guide* to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20FIM/Data%20Connectors/NXLogFIM.json","true" +"LinuxAudit_CL","NXLog LinuxAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20LinuxAudit","nxlogltd1589381969261","nxlog_linuxaudit_mss","2022-05-05","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogLinuxAudit","NXLog","NXLog LinuxAudit","The [NXLog LinuxAudit](https://docs.nxlog.co/refman/current/im/linuxaudit.html) data connector supports custom audit rules and collects logs without auditd or any other user-space software. 
IP addresses and group/user IDs are resolved to their respective names making [Linux audit](https://docs.nxlog.co/userguide/integrate/linux-audit.html) logs more intelligible to security analysts. This REST API connector can efficiently export Linux security events to Microsoft Sentinel in real-time.","[{""title"": """", ""description"": ""Follow the step-by-step instructions in the *NXLog User Guide* Integration Topic [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20LinuxAudit/Data%20Connectors/NXLogLinuxAudit.json","true" +"AIX_Audit_CL","NXLogAixAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogAixAudit","nxlogltd1589381969261","nxlog_aix_audit","2022-05-05","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogAixAudit","NXLog","NXLog AIX Audit","The [NXLog AIX Audit](https://docs.nxlog.co/refman/current/im/aixaudit.html) data connector uses the AIX Audit subsystem to read events directly from the kernel for capturing audit events on the AIX platform. This REST API connector can efficiently export AIX Audit events to Microsoft Sentinel in real time.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**NXLog_parsed_AIX_Audit_view**](https://aka.ms/sentinel-nxlogaixaudit-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": ""Follow the step-by-step instructions in the *NXLog User Guide* Integration Guide [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", 
""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogAixAudit/Data%20Connectors/NXLogAixAudit.json","true" +"NXLog_DNS_Server_CL","NXLogDnsLogs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogDnsLogs","nxlogltd1589381969261","nxlog_dns_logs","2022-05-24","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogDNSLogs","NXLog","NXLog DNS Logs","The NXLog DNS Logs data connector uses Event Tracing for Windows ([ETW](https://docs.microsoft.com/windows/apps/trace-processing/overview)) for collecting both Audit and Analytical DNS Server events. The [NXLog *im_etw* module](https://docs.nxlog.co/refman/current/im/etw.html) reads event tracing data directly for maximum efficiency, without the need to capture the event trace into an .etl file. This REST API connector can forward DNS Server events to Microsoft Sentinel in real time.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on Kusto functions deployed with the Microsoft Sentinel Solution to work as expected. 
The [**ASimDnsMicrosoftNXLog **](https://aka.ms/sentinel-nxlogdnslogs-parser) is designed to leverage Microsoft Sentinel's built-in DNS-related analytics capabilities."", ""instructions"": []}, {""title"": """", ""description"": ""Follow the step-by-step instructions in the *NXLog User Guide* Integration Topic [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogDnsLogs/Data%20Connectors/NXLogDnsLogs.json","true" +"Nasuni","Nasuni","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Nasuni","nasunicorporation","nasuni-sentinel","2023-07-07","2023-07-07","","Nasuni","Partner","https://github.com/nasuni-labs/Azure-Sentinel","","domains","NasuniEdgeAppliance","Nasuni","[Deprecated] Nasuni Edge Appliance","The [Nasuni](https://www.nasuni.com/) connector allows you to easily connect your Nasuni Edge Appliance Notifications and file system audit logs with Microsoft Sentinel. This gives you more insight into activity within your Nasuni infrastructure and improves your security operation capabilities.","[{""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to configure your Linux machine to send Nasuni event information to Microsoft Sentinel. Refer to the [Azure Monitor Agent documenation](https://learn.microsoft.com/en-us/azure/azure-monitor/agents/agents-overview) for additional details on these steps.\nConfigure the facilities you want to collect and their severities.\n1. Select the link below to open your workspace agents configuration, and select the Syslog tab.\n2. Select Add facility and choose from the drop-down list of facilities. Repeat for all the facilities you want to add.\n3. Mark the check boxes for the desired severities for each facility.\n4. Click Apply.\n"", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure Nasuni Edge Appliance settings"", ""description"": ""Follow the instructions in the [Nasuni Management Console Guide](https://view.highspot.com/viewer/629a633ae5b4caaf17018daa?iid=5e6fbfcbc7143309f69fcfcf) to configure Nasuni Edge Appliances to forward syslog events. 
Use the IP address or hostname of the Linux device running the Azure Monitor Agent in the Servers configuration field for the syslog settings.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Nasuni/Data%20Connectors/Nasuni%20Data%20Connector.json","true" +"Syslog","Nasuni","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Nasuni","nasunicorporation","nasuni-sentinel","2023-07-07","2023-07-07","","Nasuni","Partner","https://github.com/nasuni-labs/Azure-Sentinel","","domains","NasuniEdgeAppliance","Nasuni","[Deprecated] Nasuni Edge Appliance","The [Nasuni](https://www.nasuni.com/) connector allows you to easily connect your Nasuni Edge Appliance Notifications and file system audit logs with Microsoft Sentinel. This gives you more insight into activity within your Nasuni infrastructure and improves your security operation capabilities.","[{""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to configure your Linux machine to send Nasuni event information to Microsoft Sentinel. Refer to the [Azure Monitor Agent documenation](https://learn.microsoft.com/en-us/azure/azure-monitor/agents/agents-overview) for additional details on these steps.\nConfigure the facilities you want to collect and their severities.\n1. Select the link below to open your workspace agents configuration, and select the Syslog tab.\n2. Select Add facility and choose from the drop-down list of facilities. Repeat for all the facilities you want to add.\n3. Mark the check boxes for the desired severities for each facility.\n4. Click Apply.\n"", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure Nasuni Edge Appliance settings"", ""description"": ""Follow the instructions in the [Nasuni Management Console Guide](https://view.highspot.com/viewer/629a633ae5b4caaf17018daa?iid=5e6fbfcbc7143309f69fcfcf) to configure Nasuni Edge Appliances to forward syslog events. Use the IP address or hostname of the Linux device running the Azure Monitor Agent in the Servers configuration field for the syslog settings.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Nasuni/Data%20Connectors/Nasuni%20Data%20Connector.json","true" +"Netclean_Incidents_CL","NetClean ProActive","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NetClean%20ProActive","netcleantechnologiesab1651557549734","azure-sentinel-solution-netclean-proactive","2022-06-30","","","NetClean","Partner","https://www.netclean.com/contact","","domains","Netclean_ProActive_Incidents","NetClean Technologies","Netclean ProActive Incidents","This connector uses the Netclean Webhook (required) and Logic Apps to push data into Microsoft Sentinel Log Analytics","[{""title"": """", ""description"": "">**NOTE:** NetClean ProActive uses a Webhook to expose incident data, Azure Logic Apps is used to receive and push data to Log Analytics This might result in additional data ingestion costs.\n It's possible to test this without Logic Apps or NetClean Proactive see option 2"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""1. 
Create a new logic app\n Use When a HTTP request is recived as the Trigger and save it. It will now have generated a URL that can be used in the ProActive webconsole configuration.\n Add an action:\n Select the Azure Log Analytics Data Collector and choose Send Data\n Enter Connection Name, Workspace ID and Workspace Key, you will find the information needed in your Log Analytics workspace under Settings-->Agents-->Log Analytics agent instructions.\n In JSON Request body add @triggerBody(). in Custom Log Name add Netclean_Incidents."", ""title"": "" Option 1: Logic app""}, {""description"": ""Ingest data using a api function. please use the script found on\n https://learn.microsoft.com/en-us/azure/azure-monitor/logs/data-collector-api?tabs=powershell \nReplace the CustomerId and SharedKey values with your values\nReplace the content in $json variable to the sample data found here: https://github.com/Azure/Azure-Sentinel/blob/master/Sample%20Data/Custom/Netclean_Incidents_CL.json .\nSet the LogType varible to **Netclean_Incidents_CL**\nRun the script"", ""title"": "" Option 2 (Testing only)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NetClean%20ProActive/Data%20Connectors/Connector_NetClean.json","true" +"Netskope_CL","Netskope","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskope","netskope","netskope_mss","2022-05-05","","","Netskope","Partner","https://www.netskope.com/services#support","","domains","Netskope","Netskope","Netskope","The [Netskope Cloud Security Platform](https://www.netskope.com/platform) connector provides the capability to ingest Netskope logs and events into Microsoft Sentinel. The connector provides visibility into Netskope Platform Events and Alerts in Microsoft Sentinel to improve monitoring and investigation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Netskope to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Netskope and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskope/Parsers/Netskope.txt), on the second line of the query, enter the hostname(s) of your Netskope device(s) and any other unique identifiers for the logstream. 
The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Netskope API**\n\n [Follow these instructions](https://docs.netskope.com/en/rest-api-v1-overview.html) provided by Netskope to obtain an API Token. **Note:** A Netskope account is required""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Netskope connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Netskope API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the Netskope connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-netskope-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **API Key**, and **URI**.\n - Use the following schema for the `uri` value: `https://.goskope.com` Replace `` with your domain.\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.\n - The default **Log Types** is set to pull all 6 available log types (`alert, page, application, audit, infrastructure, network`), remove any are not required. \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. After successfully deploying the connector, download the Kusto Function to normalize the data fields. [Follow the steps](https://aka.ms/sentinelgithubparsersnetskope) to use the Kusto function alias, **Netskope**.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Netskope connector manually with Azure Function.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. 
Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Select **Timer Trigger**.\n3. Enter a unique Function **Name** and modify the cron schedule, if needed. The default value is set to run the Function App every 5 minutes. (Note: the Timer trigger should match the `timeInterval` value below to prevent overlapping data), click **Create**.\n4. Click on **Code + Test** on the left pane. \n5. Copy the [Function App Code](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Netskope/Data%20Connectors/Netskope/AzureFunctionNetskope/run.ps1) and paste into the Function App `run.ps1` editor.\n5. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following seven (7) application settings individually, with their respective string values (case-sensitive): \n\t\tapikey\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\ttimeInterval\n\t\tlogTypes\n\t\tlogAnalyticsUri (optional)\n> - Enter the URI that corresponds to your region. The `uri` value must follow the following schema: `https://.goskope.com` - There is no need to add subsquent parameters to the Uri, the Function App will dynamically append the parameteres in the proper format.\n> - Set the `timeInterval` (in minutes) to the default value of `5` to correspond to the default Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion. 
\n> - Set the `logTypes` to `alert, page, application, audit, infrastructure, network` - This list represents all the avaliable log types. Select the log types based on logging requirements, seperating each by a single comma.\n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.\n5. After successfully deploying the connector, download the Kusto Function to normalize the data fields. [Follow the steps](https://aka.ms/sentinelgithubparsersnetskope) to use the Kusto function alias, **Netskope**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Netskope API Token"", ""description"": ""A Netskope API Token is required. [See the documentation to learn more about Netskope API](https://innovatechcloud.goskope.com/docs/Netskope_Help/en/rest-api-v1-overview.html). **Note:** A Netskope account is required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskope/Data%20Connectors/Netskope/Netskope_API_FunctionApp.json","true" +"NetskopeAlerts_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": 
""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": 
{""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, 
{""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, 
{""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. 
Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsApplication_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", 
""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts 
Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", 
""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": 
""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. 
In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. 
You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsAudit_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", 
""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": 
""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to 
ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, 
""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. 
Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsConnection_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", 
""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts 
Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", 
""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": 
""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. 
In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. 
You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsDLP_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": 
""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", 
""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest 
Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, 
""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. 
Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsEndpoint_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", 
""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts 
Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", 
""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": 
""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. 
In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. 
You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsInfrastructure_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": 
""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": 
""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to 
ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, 
""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. 
Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsNetwork_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": 
""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", 
""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", 
""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": 
""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. 
In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. 
You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsPage_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", 
""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": 
""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to 
ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, 
""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. 
Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"Netskope_WebTx_metrics_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertscompromisedcredentialdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertsctepdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of the REST APIs, refer to the documentation below:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertsdlpdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of the REST APIs, refer to the documentation below:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertsmalsitedata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of the REST APIs, refer to the documentation below:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertsmalwaredata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of the REST APIs, refer to the documentation below:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertspolicydata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertsquarantinedata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertsremediationdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertssecurityassessmentdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Log in to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 4. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Azure Active Directory and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertsubadata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Log in to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 4. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Azure Active Directory and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"eventsapplicationdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Log in to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 4. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Azure Active Directory and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"eventsauditdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Log in to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 4. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Azure Active Directory and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"eventsconnectiondata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"eventsincidentdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"eventsnetworkdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"eventspagedata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"NetskopeWebtxData_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeWebTransactionsDataConnector","Netskope","Netskope Web Transactions Data Connector","The [Netskope Web Transactions](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/) data connector provides the functionality of a docker image to pull the Netskope Web Transactions data from google pubsublite, process the data and ingest the processed data to Log Analytics. As part of this data connector two tables will be formed in Log Analytics, one for Web Transactions data and other for errors encountered during execution.


For more details related to Web Transactions refer to the below documentation:
1. Netskope Web Transactions documentation:
> https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/
","[{""title"": """", ""description"": "">**NOTE:** This connector provides the functionality of ingesting Netskope Web Transactions data using a docker image to be deployed on a virtual machine (Either Azure VM/On Premise VM). Check the [Azure VM pricing page](https://azure.microsoft.com/pricing/details/virtual-machines/linux) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. 
Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 2 - Choose one from the following two deployment options to deploy the docker based data connector to ingest Netskope Web Transactions data **\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Netskope API Authorization Key(s) [Make sure the token has permissions for transaction events]."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Using Azure Resource Manager (ARM) Template to deploy VM [Recommended]"", ""description"": ""Using the ARM template deploy an Azure VM, install the prerequisites and start execution.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2WebTransactions-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tDocker Image Name (mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions)\n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSeek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tBackoff Retry Count (The retry count for token related errors before restarting the execution.) 
\n\t\tBackoff Sleep Time (Number of seconds to sleep before retrying) \n\t\tIdle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution) \n\t\tVM Name \n\t\tAuthentication Type \n\t\tAdmin Password or Key \n\t\tDNS Label Prefix \n\t\tUbuntu OS Version \n\t\tLocation \n\t\tVM Size \n\t\tSubnet Name \n\t\tNetwork Security Group Name \n\t\tSecurity Type \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment on previously created virtual machine"", ""description"": ""Use the following step-by-step instructions to deploy the docker based data connector manually on a previously created virtual machine.""}, {""title"": """", ""description"": ""**1. Install docker and pull docker Image**\n\n>**NOTE:** Make sure that the VM is linux based (preferably Ubuntu).\n\n1. Firstly you will need to [SSH into the virtual machine](https://learn.microsoft.com/azure/virtual-machines/linux-vm-connect?tabs=Linux).\n2. Now install [docker engine](https://docs.docker.com/engine/install/).\n3. Now pull the docker image from docker hub using the command: 'sudo docker pull mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'.\n4. Now to run the docker image use the command: 'sudo docker run -it -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'. You can replace mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions with the image id. Here docker_persistent_volume is the name of the folder that would be created on the vm in which the files will get stored.""}, {""title"": """", ""description"": ""**2. Configure the Parameters**\n\n1. Once the docker image is running it will ask for the required parameters.\n2. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSeek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tBackoff Retry Count (The retry count for token related errors before restarting the execution.) \n\t\tBackoff Sleep Time (Number of seconds to sleep before retrying) \n\t\tIdle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution)\n3. Now the execution has started but is in interactive mode, so that shell cannot be stopped. To run it as a background process, stop the current execution by pressing Ctrl+C and then use the command: 'sudo docker run -d -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'.""}, {""title"": """", ""description"": ""**3. Stop the docker container**\n\n1. Use the command 'sudo docker container ps' to list the running docker containers. Note down your container id.\n2. Now stop the container using the command: 'sudo docker stop *<*container-id*>*'.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Compute permissions"", ""description"": ""Read and write permissions to Azure VMs is required. [See the documentation to learn more about Azure VMs](https://learn.microsoft.com/azure/virtual-machines/overview).""}, {""name"": ""TransactionEvents Credentials and Permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. [See the documentation to learn more about Transaction Events.](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/)""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeWebTransactionsDataConnector/Netskope_WebTransactions.json","true" +"NetskopeWebtxErrors_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeWebTransactionsDataConnector","Netskope","Netskope Web Transactions Data Connector","The [Netskope Web Transactions](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/) data connector provides the functionality of a docker image to pull the Netskope Web Transactions data from google pubsublite, process the data and ingest the processed data to Log Analytics. As part of this data connector two tables will be formed in Log Analytics, one for Web Transactions data and other for errors encountered during execution.


For more details related to Web Transactions refer to the below documentation:
1. Netskope Web Transactions documentation:
> https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/
","[{""title"": """", ""description"": "">**NOTE:** This connector provides the functionality of ingesting Netskope Web Transactions data using a docker image to be deployed on a virtual machine (Either Azure VM/On Premise VM). Check the [Azure VM pricing page](https://azure.microsoft.com/pricing/details/virtual-machines/linux) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. 
Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 2 - Choose one from the following two deployment options to deploy the docker based data connector to ingest Netskope Web Transactions data **\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Netskope API Authorization Key(s) [Make sure the token has permissions for transaction events]."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Using Azure Resource Manager (ARM) Template to deploy VM [Recommended]"", ""description"": ""Using the ARM template deploy an Azure VM, install the prerequisites and start execution.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2WebTransactions-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tDocker Image Name (mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions)\n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSeek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tBackoff Retry Count (The retry count for token related errors before restarting the execution.) 
\n\t\tBackoff Sleep Time (Number of seconds to sleep before retrying) \n\t\tIdle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution) \n\t\tVM Name \n\t\tAuthentication Type \n\t\tAdmin Password or Key \n\t\tDNS Label Prefix \n\t\tUbuntu OS Version \n\t\tLocation \n\t\tVM Size \n\t\tSubnet Name \n\t\tNetwork Security Group Name \n\t\tSecurity Type \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment on previously created virtual machine"", ""description"": ""Use the following step-by-step instructions to deploy the docker based data connector manually on a previously created virtual machine.""}, {""title"": """", ""description"": ""**1. Install docker and pull docker Image**\n\n>**NOTE:** Make sure that the VM is linux based (preferably Ubuntu).\n\n1. Firstly you will need to [SSH into the virtual machine](https://learn.microsoft.com/azure/virtual-machines/linux-vm-connect?tabs=Linux).\n2. Now install [docker engine](https://docs.docker.com/engine/install/).\n3. Now pull the docker image from docker hub using the command: 'sudo docker pull mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'.\n4. Now to run the docker image use the command: 'sudo docker run -it -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'. You can replace mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions with the image id. Here docker_persistent_volume is the name of the folder that would be created on the vm in which the files will get stored.""}, {""title"": """", ""description"": ""**2. Configure the Parameters**\n\n1. Once the docker image is running it will ask for the required parameters.\n2. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSeek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tBackoff Retry Count (The retry count for token related errors before restarting the execution.) \n\t\tBackoff Sleep Time (Number of seconds to sleep before retrying) \n\t\tIdle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution)\n3. Now the execution has started but is in interactive mode, so that shell cannot be stopped. To run it as a background process, stop the current execution by pressing Ctrl+C and then use the command: 'sudo docker run -d -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'.""}, {""title"": """", ""description"": ""**3. Stop the docker container**\n\n1. Use the command 'sudo docker container ps' to list the running docker containers. Note down your container id.\n2. Now stop the container using the command: 'sudo docker stop *<*container-id*>*'.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Compute permissions"", ""description"": ""Read and write permissions to Azure VMs is required. [See the documentation to learn more about Azure VMs](https://learn.microsoft.com/azure/virtual-machines/overview).""}, {""name"": ""TransactionEvents Credentials and Permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. [See the documentation to learn more about Transaction Events.](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/)""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeWebTransactionsDataConnector/Netskope_WebTransactions.json","true" +"","Network Session Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Network%20Session%20Essentials","azuresentinel","azure-sentinel-solution-networksession","2022-11-11","2022-11-11","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Network Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Network%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-networkthreatdetection","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"CommonSecurityLog","Netwrix Auditor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor","azuresentinel","azure-sentinel-solution-netwrixauditor","2022-06-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Netwrix","Netwrix","[Deprecated] Netwrix Auditor via Legacy Agent","Netwrix Auditor data connector provides the capability to ingest [Netwrix Auditor (formerly Stealthbits Privileged Activity Manager)](https://www.netwrix.com/auditor.html) events into Microsoft Sentinel. Refer to [Netwrix documentation](https://helpcenter.netwrix.com/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on NetwrixAuditor parser based on a Kusto Function to work as expected. This parser is installed along with solution installation."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure Netwrix Auditor to send logs using CEF"", ""description"": ""[Follow the instructions](https://www.netwrix.com/download/QuickStart/Netwrix_Auditor_Add-on_for_HPE_ArcSight_Quick_Start_Guide.pdf) to configure event export from Netwrix Auditor.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor/Data%20Connectors/Connector_NetwrixAuditor.json","true" +"CommonSecurityLog","Netwrix Auditor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor","azuresentinel","azure-sentinel-solution-netwrixauditor","2022-06-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NetwrixAma","Netwrix","[Deprecated] Netwrix Auditor via AMA","Netwrix Auditor data connector provides the capability to ingest [Netwrix Auditor (formerly Stealthbits Privileged Activity Manager)](https://www.netwrix.com/auditor.html) events into Microsoft Sentinel. Refer to [Netwrix documentation](https://helpcenter.netwrix.com/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on NetwrixAuditor parser based on a Kusto Function to work as expected. This parser is installed along with solution installation."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Configure Netwrix Auditor to send logs using CEF"", ""description"": ""[Follow the instructions](https://www.netwrix.com/download/QuickStart/Netwrix_Auditor_Add-on_for_HPE_ArcSight_Quick_Start_Guide.pdf) to configure event export from Netwrix Auditor."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor/Data%20Connectors/template_NetwrixAuditorAMA.json","true" +"","Neustar IP GeoPoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Neustar%20IP%20GeoPoint","azuresentinel","azure-sentinel-solution-neustaripgeopoint","2022-09-30","2022-09-30","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"NonameAPISecurityAlert_CL","NonameSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NonameSecurity","nonamegate","nonamesecurity_sentinelsolution","2022-12-01","","","Noname Security","Partner","https://nonamesecurity.com/","","domains","NonameSecurityMicrosoftSentinel","Noname Security","Noname Security for Microsoft Sentinel","Noname Security solution to POST data into a Microsoft Sentinel SIEM workspace via the Azure Monitor REST API","[{""title"": ""Configure the Noname Sentinel integration."", ""description"": ""Configure the Sentinel workflow in the Noname integrations settings. 
Find documentation at https://docs.nonamesecurity.com"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NonameSecurity/Data%20Connectors/Connector_RESTAPI_NonameSecurity.json","true" +"NordPassEventLogs_CL","NordPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NordPass","nordsecurityinc","azure-sentinel-solution-nordpass","2025-04-22","","","NordPass","Partner","https://support.nordpass.com/","","domains","NordPass","NordPass","NordPass","Integrating NordPass with Microsoft Sentinel SIEM via the API will allow you to automatically transfer Activity Log data from NordPass to Microsoft Sentinel and get real-time insights, such as item activity, all login attempts, and security notifications.","[{""description"": ""To proceed with the Microsoft Sentinel setup\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Nordpass-azuredeploy)\n2. 
**Please note that after the successful deployment, the system pulls Activity Log data every 1 minute by default.**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""description"": ""Ensure that the [resource group](https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/manage-resource-groups-portal#create-resource-groups) and the [Log Analytics workspace](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/quick-create-workspace?tabs=azure-portal) are created and located in the same region so you can deploy the Azure Functions.""}, {""description"": ""[Add Microsoft Sentinel](https://learn.microsoft.com/en-us/azure/sentinel/quickstart-onboard#enable-microsoft-sentinel-) to the created Log Analytics workspace.""}, {""description"": ""Generate a [Microsoft Sentinel API URL and token](https://www.google.com/url?q=https://support.nordpass.com/hc/en-us/articles/31972037289873&sa=D&source=docs&ust=1743770997230005&usg=AOvVaw16p0hstJ6OeBBoFdBKZRfr) in the NordPass Admin Panel to finish the Azure Functions integration. Please note that you\u2019ll need the NordPass Enterprise account for that.""}, {""description"": ""**Important:** This connector uses Azure Functions to retrieve Activity Logs from NordPass into Microsoft Sentinel. This may result in additional data ingestion costs. 
For more information, refer to the Azure Functions pricing page.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NordPass/Data%20Connectors/NordPass_API_FunctionApp.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NordPass/Data%20Connectors/deployment/NordPass_data_connector.json","false" +"CommonSecurityLog","NozomiNetworks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks","azuresentinel","azure-sentinel-solution-nozominetworks","2022-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NozomiNetworksN2OS","Nozomi Networks","[Deprecated] Nozomi Networks N2OS via Legacy Agent","The [Nozomi Networks](https://www.nozominetworks.com/) data connector provides the capability to ingest Nozomi Networks Events into Microsoft Sentinel. Refer to the Nozomi Networks [PDF documentation](https://www.nozominetworks.com/resources/data-sheets-brochures-learning-guides/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**NozomiNetworksEvents**](https://aka.ms/sentinel-NozomiNetworks-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Follow these steps to configure Nozomi Networks device for sending Alerts, Audit Logs, Health Logs log via syslog in CEF format:\n\n> 1. Log in to the Guardian console.\n\n> 2. Navigate to Administration->Data Integration, press +Add and select the Common Event Format (CEF) from the drop down\n\n> 3. Create New Endpoint using the appropriate host information and enable Alerts, Audit Logs, Health Logs for sending.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks/Data%20Connectors/NozomiNetworksN2OS.json","true" +"CommonSecurityLog","NozomiNetworks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks","azuresentinel","azure-sentinel-solution-nozominetworks","2022-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NozomiNetworksN2OSAma","Nozomi Networks","[Deprecated] Nozomi Networks N2OS via AMA","The [Nozomi Networks](https://www.nozominetworks.com/) data connector provides the capability to ingest Nozomi Networks Events into Microsoft Sentinel. Refer to the Nozomi Networks [PDF documentation](https://www.nozominetworks.com/resources/data-sheets-brochures-learning-guides/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**NozomiNetworksEvents**](https://aka.ms/sentinel-NozomiNetworks-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Follow these steps to configure Nozomi Networks device for sending Alerts, Audit Logs, Health Logs log via syslog in CEF format:\n\n> 1. Log in to the Guardian console.\n\n> 2. Navigate to Administration->Data Integration, press +Add and select the Common Event Format (CEF) from the drop down\n\n> 3. Create New Endpoint using the appropriate host information and enable Alerts, Audit Logs, Health Logs for sending."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks/Data%20Connectors/template_NozomiNetworksN2OSAMA.json","true" +"CommonSecurityLog","OSSEC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC","azuresentinel","azure-sentinel-solution-ossec","2022-05-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","OSSEC","OSSEC","[Deprecated] OSSEC via Legacy Agent","OSSEC data connector provides the capability to ingest [OSSEC](https://www.ossec.net/) events into Microsoft Sentinel. Refer to [OSSEC documentation](https://www.ossec.net/docs) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias OSSEC and load the function code or click [here](https://aka.ms/sentinel-OSSECEvent-parser), on the second line of the query, enter the hostname(s) of your OSSEC device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://www.ossec.net/docs/docs/manual/output/syslog-output.html) to configure OSSEC sending alerts via syslog.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC/Data%20Connectors/Connector_CEF_OSSEC.json","true" +"CommonSecurityLog","OSSEC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC","azuresentinel","azure-sentinel-solution-ossec","2022-05-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","OSSECAma","OSSEC","[Deprecated] OSSEC via AMA","OSSEC data connector provides the capability to ingest [OSSEC](https://www.ossec.net/) events into Microsoft Sentinel. Refer to [OSSEC documentation](https://www.ossec.net/docs) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias OSSEC and load the function code or click [here](https://aka.ms/sentinel-OSSECEvent-parser), on the second line of the query, enter the hostname(s) of your OSSEC device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://www.ossec.net/docs/docs/manual/output/syslog-output.html) to configure OSSEC sending alerts via syslog."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC/Data%20Connectors/template_OSSECAMA.json","true" +"ObsidianActivity_CL","Obsidian Datasharing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing","391c3d87-edc8-4f72-a719-825c022b8eb4","azure-sentinel-solution-obsidian-activity-threat","2024-01-01","","","Obsidian Security","Partner","https://obsidiansecurity.com/contact","","domains","ObsidianDatasharing","Obsidian Security","Obsidian Datasharing Connector","The Obsidian Datasharing connector provides the capability to read raw event data from Obsidian Datasharing in Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Obsidian Datasharing uses in a Microsoft Analytics Workspace, if the data forwarding option is enabled in Obsidian Datasharing then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. 
This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Obsidian Datasharing connector resources"", ""applicationDisplayName"": ""Obsidian Datasharing Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Activity Stream Name"", ""value"": ""Custom-ObsidianActivity_CL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Threat Stream Name"", ""value"": ""Custom-ObsidianThreat_CL""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": 
{""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing/Data%20Connectors/ObsidianDatasharing_CCP/ObsidianDatasharing_ConnectorDefinition.json","true" +"ObsidianThreat_CL","Obsidian Datasharing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing","391c3d87-edc8-4f72-a719-825c022b8eb4","azure-sentinel-solution-obsidian-activity-threat","2024-01-01","","","Obsidian Security","Partner","https://obsidiansecurity.com/contact","","domains","ObsidianDatasharing","Obsidian Security","Obsidian Datasharing Connector","The Obsidian Datasharing connector provides the capability to read raw event data from Obsidian Datasharing in Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Obsidian Datasharing uses in a Microsoft Analytics Workspace, if the data forwarding option is enabled in Obsidian Datasharing then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. 
This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Obsidian Datasharing connector resources"", ""applicationDisplayName"": ""Obsidian Datasharing Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Activity Stream Name"", ""value"": ""Custom-ObsidianActivity_CL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Threat Stream Name"", ""value"": ""Custom-ObsidianThreat_CL""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": 
{""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing/Data%20Connectors/ObsidianDatasharing_CCP/ObsidianDatasharing_ConnectorDefinition.json","true" +"Okta_CL","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSO","Okta","Okta Single Sign-On","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) connector provides the capability to ingest audit and event logs from the Okta API into Microsoft Sentinel. The connector provides visibility into these log types in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Okta SSO to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated, if you have previously deployed an earlier version, and want to update, please delete the existing Okta Azure Function before redeploying this version.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Okta SSO API**\n\n [Follow these instructions](https://developer.okta.com/docs/guides/create-an-api-token/create-the-token/) to create an API Token.""}, {""title"": """", ""description"": ""**Note** - For more information on the rate limit restrictions enforced by Okta, please refer to the **[documentation](https://developer.okta.com/docs/reference/rl-global-mgmt/)**.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Okta SSO connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Okta SSO API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of 
the Okta SSO connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentineloktaazuredeployv2-solution) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentineloktaazuredeployv2-solution-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Token** and **URI**. \n - Use the following schema for the `uri` value: `https:///api/v1/logs?since=` Replace `` with your domain. [Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the inital start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format. \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Okta SSO connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentineloktaazurefunctioncodev2) file. Extract archive to your local development computer.\n2. 
Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following five (5) application settings individually, with their respective string values (case-sensitive): \n\t\tapiToken\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n - Use the following schema for the `uri` value: `https:///api/v1/logs?since=` Replace `` with your domain. [Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the inital start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format.\n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Okta API Token"", ""description"": ""An Okta API Token is required. 
See the documentation to learn more about the [Okta System Log API](https://developer.okta.com/docs/reference/api/system-log/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaSingleSign-On/Connector_REST_API_FunctionApp_Okta.json","true" +"OktaNativePoller_CL","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSO_Polling","Okta","Okta Single Sign-On (Polling CCP)","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) connector provides the capability to ingest audit and event logs from the Okta API into Microsoft Sentinel. The connector provides visibility into these log types in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""title"": ""Connect OktaSSO"", ""description"": ""Please insert your APIKey"", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Domain Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{placeHolder1}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnector/azuredeploy_Okta_native_poller_connector.json","true" +"OktaV2_CL","Okta Single 
Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSOv2","Microsoft","Okta Single Sign-On","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) data connector provides the capability to ingest audit and event logs from the Okta System Log API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform and uses the Okta System Log API to fetch the events. The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""To enable the Okta Single Sign-On for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Okta Domain Name"", ""placeholder"": ""Okta Domain Name (e.g., myDomain.okta.com)"", ""type"": ""text"", ""name"": ""domainname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write 
permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Okta API Token"", ""description"": ""An Okta API token. Follow the [following instructions](https://developer.okta.com/docs/guides/create-an-api-token/main/) to create an See the [documentation](https://developer.okta.com/docs/reference/api/system-log/) to learn more about Okta System Log API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnectorV2/OktaSSOv2_DataConnectorDefinition.json","true" +"Okta_CL","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSOv2","Microsoft","Okta Single Sign-On","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) data connector provides the capability to ingest audit and event logs from the Okta Sysem Log API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform and uses the Okta System Log API to fetch the events. 
The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""To enable the Okta Single Sign-On for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Okta Domain Name"", ""placeholder"": ""Okta Domain Name (e.g., myDomain.okta.com)"", ""type"": ""text"", ""name"": ""domainname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Okta API Token"", ""description"": ""An Okta API token. 
Follow the [following instructions](https://developer.okta.com/docs/guides/create-an-api-token/main/) to create an See the [documentation](https://developer.okta.com/docs/reference/api/system-log/) to learn more about Okta System Log API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnectorV2/OktaSSOv2_DataConnectorDefinition.json","true" +"signIns","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSOv2","Microsoft","Okta Single Sign-On (Preview)","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) data connector provides the capability to ingest audit and event logs from the Okta Sysem Log API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform and uses the Okta System Log API to fetch the events. 
The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""To enable the Okta Single Sign-On for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Okta Domain Name"", ""placeholder"": ""Okta Domain Name (e.g., myDomain.okta.com)"", ""type"": ""text"", ""name"": ""domainname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Okta API Token"", ""description"": ""An Okta API token. 
Follow the [following instructions](https://developer.okta.com/docs/guides/create-an-api-token/main/) to create an See the [documentation](https://developer.okta.com/docs/reference/api/system-log/) to learn more about Okta System Log API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnectorV2/azuredeploy_Okta_native_poller_connector_v2.json","true" +"Onapsis_Defend_CL","Onapsis Defend","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend","onapsis","azure-sentinel-solution-onapsis-defend","2025-07-17","2025-07-17","","Onapsis","Partner","https://onapsis.com/support/","","domains","Onapsis","Onapsis Platform","Onapsis Defend Integration","Onapsis Defend Integration is aimed at forwarding alerts and logs collected and detected by Onapsis Platform into Microsoft Sentinel SIEM","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""Onapsis Defend Integration push to Microsoft Sentinel""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. 
Maintain the data collection endpoint details and authentication info in Onapsis Defend Integration"", ""description"": ""Share the data collection endpoint URL and authentication info with the Onapsis Defend Integration administrator to configure the Onapsis Defend Integration to send data to the data collection endpoint."", ""instructions"": [{""parameters"": {""label"": ""Use this value to configure as Tenant ID in the LogIngestionAPI credential."", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Use this value to configure the LogsIngestionURL parameter when deploying the IFlow."", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", 
""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend/Data%20Connectors/Onapsis.json","true" +"Onapsis_Defend_CL","Onapsis Defend","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend","onapsis","azure-sentinel-solution-onapsis-defend","2025-07-17","2025-07-17","","Onapsis","Partner","https://onapsis.com/support/","","domains","Onapsis","Onapsis SE","Onapsis Defend: Integrate Unmatched SAP Threat Detection & Intel with Microsoft Sentinel","Empower security teams with deep visibility into unique exploit, zero-day, and threat actor activity; suspicious user or insider behavior; sensitive data downloads; security control violations; and more - all enriched by the SAP experts at Onapsis.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. 
This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""Onapsis Defend Integration push to Microsoft Sentinel""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. Maintain the data collection endpoint details and authentication info in Onapsis Defend Integration"", ""description"": ""Share the data collection endpoint URL and authentication info with the Onapsis Defend Integration administrator to configure the Onapsis Defend Integration to send data to the data collection endpoint."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID | Use this value to configure as Tenant ID"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID | Use this value for the Client ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret | Use this value for the Token"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""LogIngestionURL | Use this value for the URL parameter"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID | Use this value for the DCR_ID parameter"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, 
""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend/Data%20Connectors/Onapsis_PUSH_CCP/Onapsis_connectorDefinition.json","true" +"CommonSecurityLog","Onapsis Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Platform","onapsis","onapsis_mss","2022-05-11","","","Onapsis","Partner","https://onapsis.com/company/contact-us","","domains","OnapsisPlatform","Onapsis","[Deprecated] Onapsis Platform","The Onapsis Connector allows you to export the alarms triggered in the Onapsis Platform into Microsoft Sentinel in real-time. This gives you the ability to monitor the activity on your SAP systems, identify incidents and respond to them quickly.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your Onapsis Console and Microsoft Sentinel. 
This machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Refer to the Onapsis in-product help to set up log forwarding to the Syslog agent.\n\n> 1. Go to Setup > Third-party integrations > Defend Alarms and follow the instructions for Microsoft Sentinel.\n\n> 2. Make sure your Onapsis Console can reach the proxy machine where the agent is installed - logs should be sent to port 514 using TCP.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Create Onapsis lookup function for incident enrichment"", ""description"": ""[Follow these steps to get this Kusto function](https://aka.ms/sentinel-Onapsis-parser)""}, {""title"": ""5. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Platform/Data%20Connectors/OnapsisPlatform.json","true" +"","OneIdentity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneIdentity","quest","oneidentity_mss","2022-05-02","","","One Identity","Partner","https://support.oneidentity.com/","","domains","","","","","","","","false" +"OneLoginEventsV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLogin","OneLogin","[DEPRECATED] OneLogin IAM Platform","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through Webhooks. The OneLogin Event Webhook API which is also known as the Event Broadcaster will send batches of events in near real-time to an endpoint that you specify. When a change occurs in the OneLogin, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.onelogin.com/api-docs/1/events/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OneLogin**](https://aka.ms/sentinel-OneLogin-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the OneLogin**\n\n Follow the [instructions](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469) to configure Webhooks.\n\n1. Generate the **OneLoginBearerToken** according to your password policy.\n2. Set Custom Header in the format: Authorization: Bearer .\n3. 
Use JSON Array Logs Format.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the OneLogin data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the OneLogin data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OneLogin-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **OneLoginBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the OneLogin data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-OneLogin-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tOneLoginBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Webhooks Credentials/permissions"", ""description"": ""**OneLoginBearerToken**, **Callback URL** are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469).You need to generate **OneLoginBearerToken** according to your security requirements and use it in **Custom Headers** section in format: Authorization: Bearer **OneLoginBearerToken**. 
Logs Format: JSON Array.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLogin_Webhooks_FunctionApp.json","true" +"OneLoginUsersV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLogin","OneLogin","[DEPRECATED] OneLogin IAM Platform","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through Webhooks. The OneLogin Event Webhook API which is also known as the Event Broadcaster will send batches of events in near real-time to an endpoint that you specify. When a change occurs in the OneLogin, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.onelogin.com/api-docs/1/events/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OneLogin**](https://aka.ms/sentinel-OneLogin-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the OneLogin**\n\n Follow the [instructions](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469) to configure Webhooks.\n\n1. Generate the **OneLoginBearerToken** according to your password policy.\n2. Set Custom Header in the format: Authorization: Bearer .\n3. 
Use JSON Array Logs Format.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the OneLogin data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the OneLogin data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OneLogin-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **OneLoginBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the OneLogin data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-OneLogin-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tOneLoginBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Webhooks Credentials/permissions"", ""description"": ""**OneLoginBearerToken**, **Callback URL** are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469).You need to generate **OneLoginBearerToken** according to your security requirements and use it in **Custom Headers** section in format: Authorization: Bearer **OneLoginBearerToken**. 
Logs Format: JSON Array.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLogin_Webhooks_FunctionApp.json","true" +"OneLogin_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLogin","OneLogin","[DEPRECATED] OneLogin IAM Platform","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through Webhooks. The OneLogin Event Webhook API which is also known as the Event Broadcaster will send batches of events in near real-time to an endpoint that you specify. When a change occurs in the OneLogin, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.onelogin.com/api-docs/1/events/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OneLogin**](https://aka.ms/sentinel-OneLogin-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the OneLogin**\n\n Follow the [instructions](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469) to configure Webhooks.\n\n1. Generate the **OneLoginBearerToken** according to your password policy.\n2. Set Custom Header in the format: Authorization: Bearer .\n3. 
Use JSON Array Logs Format.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the OneLogin data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the OneLogin data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OneLogin-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **OneLoginBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the OneLogin data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-OneLogin-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tOneLoginBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Webhooks Credentials/permissions"", ""description"": ""**OneLoginBearerToken**, **Callback URL** are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469).You need to generate **OneLoginBearerToken** according to your security requirements and use it in **Custom Headers** section in format: Authorization: Bearer **OneLoginBearerToken**. 
Logs Format: JSON Array.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLogin_Webhooks_FunctionApp.json","true" +"OneLoginEventsV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLoginIAMLogsCCPDefinition","Microsoft","OneLogin IAM Platform (via Codeless Connector Framework)","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through REST API by using OneLogin [Events API](https://developers.onelogin.com/api-docs/1/events/get-events) and OneLogin [Users API](https://developers.onelogin.com/api-docs/1/users/get-users). The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""description"": ""To ingest data from OneLogin IAM to Microsoft Sentinel, you have to click on Add Domain button below then you get a pop up to fill the details, provide the required information and click on Connect. 
You can see the domain endpoints connected in the grid.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""OneLogin Domain"", ""placeholder"": ""Enter your Company's OneLogin Domain"", ""type"": ""text"", ""name"": ""domainName"", ""required"": true, ""description"": ""For example, if your OneLogin Domain is test.onelogin.com, you need to enter only test in the above field.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Enter your Client ID"", ""type"": ""text"", ""name"": ""cId"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Enter your Client Secret"", ""type"": ""password"", ""name"": ""cSec"", ""required"": true}}]}]}}], ""title"": ""Connect OneLogin IAM Platform to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""OneLogin IAM API Credentials"", ""description"": ""To create API Credentials follow the document link provided here, [Click Here](https://developers.onelogin.com/api-docs/1/getting-started/working-with-api-credentials). \n Make sure to have an account type of either account owner or administrator to create the API credentials. 
\n Once you create the API Credentials you get your Client ID and Client Secret.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLoginIAMLogs_ccp/OneLoginIAMLogs_ConnectorDefinition.json","true" +"OneLoginUsersV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLoginIAMLogsCCPDefinition","Microsoft","OneLogin IAM Platform (via Codeless Connector Framework)","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through REST API by using OneLogin [Events API](https://developers.onelogin.com/api-docs/1/events/get-events) and OneLogin [Users API](https://developers.onelogin.com/api-docs/1/users/get-users). The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""description"": ""To ingest data from OneLogin IAM to Microsoft Sentinel, you have to click on Add Domain button below then you get a pop up to fill the details, provide the required information and click on Connect. 
You can see the domain endpoints connected in the grid.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""OneLogin Domain"", ""placeholder"": ""Enter your Company's OneLogin Domain"", ""type"": ""text"", ""name"": ""domainName"", ""required"": true, ""description"": ""For example, if your OneLogin Domain is test.onelogin.com, you need to enter only test in the above field.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Enter your Client ID"", ""type"": ""text"", ""name"": ""cId"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Enter your Client Secret"", ""type"": ""password"", ""name"": ""cSec"", ""required"": true}}]}]}}], ""title"": ""Connect OneLogin IAM Platform to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""OneLogin IAM API Credentials"", ""description"": ""To create API Credentials follow the document link provided here, [Click Here](https://developers.onelogin.com/api-docs/1/getting-started/working-with-api-credentials). \n Make sure to have an account type of either account owner or administrator to create the API credentials. 
\n Once you create the API Credentials you get your Client ID and Client Secret.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLoginIAMLogs_ccp/OneLoginIAMLogs_ConnectorDefinition.json","true" +"OneTrustMetadataV3_CL","OneTrust","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneTrust","onetrustllc1594047340198","azure-sentinel-solution-onetrust","2025-10-24","2025-10-24","","OneTrust, LLC","Partner","https://www.onetrust.com/support/","","domains","OnetrustPush","OneTrust","OneTrust","The OneTrust connector for Microsoft Sentinel provides the capability to have near real time visibility into where sensitive data has been located or remediated across across Google Cloud and other OneTrust supported data sources.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that OneTrust uses in a Microsoft Analytics Workspace. If OneTrust's data forwarding option is enabled then raw event data can be sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy OneTrust connector resources"", ""applicationDisplayName"": ""OneTrust Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. 
Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""OneTrust Metadata Stream Name"", ""value"": ""Custom-OneTrustMetadataV3""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). 
Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneTrust/Data%20Connectors/OneTrustLogs_CCF/OneTrustLogs_connectorDefinition.json","true" +"","Open Systems","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems","opensystemsag1582030008223","azure-sentinel-solution-osag","2025-05-12","","","Open Systems","Partner","https://www.open-systems.com/support","","domains","","","","","","","","false" +"","OpenCTI","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OpenCTI","azuresentinel","azure-sentinel-solution-opencti","2022-09-22","2022-09-22","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Syslog","OpenVPN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OpenVPN","azuresentinel","azure-sentinel-solution-openvpn","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OpenVPN","OpenVPN","[Deprecated] OpenVPN Server","The [OpenVPN](https://github.com/OpenVPN) data connector provides the capability to ingest OpenVPN Server logs into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OpenVpnEvent**](https://aka.ms/sentinel-openvpn-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the OpenVPN are forwarded.\n\n> Logs from OpenVPN Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. 
Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Check your OpenVPN logs."", ""description"": ""OpenVPN server logs are written into common syslog file (depending on the Linux distribution used: e.g. /var/log/messages)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OpenVPN/Data%20Connectors/OpenVPN_Syslog.json","true" +"OCI_LogsV2_CL","Oracle Cloud Infrastructure","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure","azuresentinel","azure-sentinel-solution-ocilogs","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OCI-Connector-CCP-Definition","Microsoft","Oracle Cloud Infrastructure (via Codeless Connector Framework)","The Oracle Cloud Infrastructure (OCI) data connector provides the capability to ingest OCI Logs from [OCI Stream](https://docs.oracle.com/iaas/Content/Streaming/Concepts/streamingoverview.htm) into Microsoft Sentinel using the [OCI Streaming REST API](https://docs.oracle.com/iaas/api/#/streaming/streaming/20180418).","[{""title"": ""Connect to OCI Streaming API to start collecting Event logs in Microsoft Sentinel"", ""description"": ""1) Log in to the OCI console and access the navigation menu.\n2) In the navigation menu, go to \""Analytics & AI\"" \u2192 \""Streaming\"".\n3) Click \""Create Stream\"".\n4) Select an existing \""Stream Pool\"" or create a new one.\n5) Enter the following details:\n - \""Stream Name\""\n - \""Retention\""\n - \""Number of Partitions\""\n - 
\""Total Write Rate\""\n - \""Total Read Rate\"" (based on your data volume)\n6) In the navigation menu, go to \""Logging\"" \u2192 \""Service Connectors\"".\n7) Click \""Create Service Connector\"".\n8) Enter the following details:\n - \""Connector Name\""\n - \""Description\""\n - \""Resource Compartment\""\n9) Select the \""Source\"": \""Logging\"".\n10) Select the \""Target\"": \""Streaming\"".\n11) (Optional) Configure \""Log Group\"", \""Filters\"", or use a \""custom search query\"" to stream only the required logs.\n12) Configure the \""Target\"" by selecting the previously created stream.\n13) Click \""Create\"".\n14) Follow the documentation to create a [Private Key and API Key Configuration File](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/apisigningkey.htm).\n\r Note : The connector only supports ingesting data from one partition ID at a time, and that ID must be a single-digit number (e.g., 0, 1, or 2).\n "", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Stream OCID"", ""required"": true, ""placeholder"": ""Provide the OCI Stream OCID (E.g. ocid1.stream.oc1..xxxxxxEXAMPLExxxxxx)"", ""type"": ""text"", ""name"": ""streamId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Service Endpoint Base URL"", ""required"": true, ""placeholder"": ""Provide the Service Endpoint Base URL: (https://cell-1.streaming.ap-hyderabad-1.oci.oraclecloud.com)"", ""type"": ""text"", ""name"": ""serviceEndpointBaseUrl""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Cursor Type"", ""name"": ""cursorType"", ""required"": true, ""type"": ""text"", ""placeholder"": ""Select Cursor Type"", ""options"": [{""key"": ""IndividualCursor"", ""text"": ""Individual Cursor""}]}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Partition Id"", ""required"": true, ""placeholder"": ""Provide the Partition Id. (E.g. 
0 or 1 or 2)"", ""type"": ""text"", ""name"": ""partitions"", ""description"": ""The partition ID uses zero-based indexing. For example, if a stream has 3 partitions, the valid partition IDs are 0, 1, or 2.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Tenant ID"", ""required"": true, ""placeholder"": ""OCI Tenant ID (E.g. ocid1.tenancy.oc1..xxxxxxEXAMPLExxxxxx)"", ""type"": ""text"", ""name"": ""tenantId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""User ID"", ""required"": true, ""placeholder"": ""Provide the User Id. (E.g. ocid1.user.oc1..xxxxxxEXAMPLExxxxxx)"", ""type"": ""text"", ""name"": ""userId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pem File Content"", ""required"": true, ""placeholder"": ""Provide the Pem File content."", ""type"": ""password"", ""name"": ""pemFile""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pass Phrase"", ""required"": false, ""placeholder"": ""Provide the pass phrase for the Pem File Content. (Optional)"", ""type"": ""password"", ""name"": ""passPhrase""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Fingerprint"", ""required"": true, ""placeholder"": ""Provide the fingerprint for the Pem File Content. (E.g. 
12:34:56:78:90:AB:CD:EF:GH:IJ:KL:MN:OP)"", ""type"": ""password"", ""name"": ""publicFingerprint""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""Connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""OCI Streaming API access"", ""description"": ""Access to the OCI Streaming API through a API Signing Keys is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure/Data%20Connectors/Oracle_Cloud_Infrastructure_CCP/OCI_DataConnector_DataConnectorDefinition.json","true" +"OCI_Logs_CL","Oracle Cloud Infrastructure","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure","azuresentinel","azure-sentinel-solution-ocilogs","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OracleCloudInfrastructureLogsConnector","Oracle","[DEPRECATED] Oracle Cloud Infrastructure","The Oracle Cloud Infrastructure (OCI) data connector provides the capability to ingest OCI Logs from [OCI Stream](https://docs.oracle.com/iaas/Content/Streaming/Concepts/streamingoverview.htm) into Microsoft Sentinel using the [OCI Streaming REST API](https://docs.oracle.com/iaas/api/#/streaming/streaming/20180418).

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector can go over the 500 column limit of log Analytics. When this happens some logs will be dropped. For this reason the connector can be unrealiable depending on the logs that are being generated and collected.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OCILogs**](https://aka.ms/sentinel-OracleCloudInfrastructureLogsConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Creating Stream**\n\n1. Log in to OCI console and go to *navigation menu* -> *Analytics & AI* -> *Streaming*\n2. Click *Create Stream*\n3. Select Stream Pool or create a new one\n4. Provide the *Stream Name*, *Retention*, *Number of Partitions*, *Total Write Rate*, *Total Read Rate* based on your data amount.\n5. Go to *navigation menu* -> *Logging* -> *Service Connectors*\n6. Click *Create Service Connector*\n6. Provide *Connector Name*, *Description*, *Resource Compartment*\n7. Select Source: Logging\n8. 
Select Target: Streaming\n9. (Optional) Configure *Log Group*, *Filters* or use custom search query to stream only logs that you need.\n10. Configure Target - select the strem created before.\n11. Click *Create*\n\nCheck the documentation to get more information about [Streaming](https://docs.oracle.com/en-us/iaas/Content/Streaming/home.htm) and [Service Connectors](https://docs.oracle.com/en-us/iaas/Content/service-connector-hub/home.htm).""}, {""title"": """", ""description"": ""**STEP 2 - Creating credentials for OCI REST API**\n\nFollow the documentation to [create Private Key and API Key Configuration File.](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/apisigningkey.htm)\n\n>**IMPORTANT:** Save Private Key and API Key Configuration File created during this step as they will be used during deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the OCI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as OCI API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the OCI data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OracleCloudInfrastructureLogsConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**, **User**, **Key_content**, **Pass_phrase**, **Fingerprint**, **Tenancy**, **Region**, **Message Endpoint**, **Stream Ocid**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the OCI data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-OracleCloudInfrastructureLogsConnector-functionapp) file. Extract archive to your local development computer..\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAzureSentinelWorkspaceId\n\t\tAzureSentinelSharedKey\n\t\tuser\n\t\tkey_content\n\t\tpass_phrase (Optional)\n\t\tfingerprint\n\t\ttenancy\n\t\tregion\n\t\tMessage Endpoint\n\t\tStreamOcid\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`.\n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""OCI API Credentials"", ""description"": "" **API Key Configuration File** and **Private Key** are required for OCI API connection. 
See the documentation to learn more about [creating keys for API access](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/apisigningkey.htm)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure/Data%20Connectors/OCI_logs_API_FunctionApp.json","true" +"Syslog","OracleDatabaseAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleDatabaseAudit","azuresentinel","azure-sentinel-solution-oracledbaudit","2021-11-05","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OracleDatabaseAudit","Oracle","[Deprecated] Oracle Database Audit","The Oracle DB Audit data connector provides the capability to ingest [Oracle Database](https://www.oracle.com/database/technologies/) audit events into Microsoft Sentinel through the syslog. Refer to [documentation](https://docs.oracle.com/en/database/oracle/oracle-database/21/dbseg/introduction-to-auditing.html#GUID-94381464-53A3-421B-8F13-BD171C867405) for more information.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Oracle Database Audit and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleDatabaseAudit/Parsers/OracleDatabaseAuditEvent.txt). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure Oracle Database Audit events to be sent to Syslog"", ""description"": ""Follow the below instructions \n\n 1. Create the Oracle database [Follow these steps.](https://learn.microsoft.com/en-us/azure/virtual-machines/workloads/oracle/oracle-database-quick-create) \n\n 2. Login to Oracle database created from the above step [Follow these steps.](https://docs.oracle.com/cd/F49540_01/DOC/server.815/a67772/create.htm) \n\n 3. 
Enable unified logging over syslog by **Alter the system to enable unified logging** [Following these steps.](https://docs.oracle.com/en/database/oracle/oracle-database/21/refrn/UNIFIED_AUDIT_COMMON_SYSTEMLOG.html#GUID-9F26BC8E-1397-4B0E-8A08-3B12E4F9ED3A) \n\n 4. Create and **enable an Audit policy for unified auditing** [Follow these steps.](https://docs.oracle.com/en/database/oracle/oracle-database/19/sqlrf/CREATE-AUDIT-POLICY-Unified-Auditing.html#GUID-8D6961FB-2E50-46F5-81F7-9AEA314FC693) \n\n 5. **Enabling syslog and Event Viewer** Captures for the Unified Audit Trail [Follow these steps.](https://docs.oracle.com/en/database/oracle/oracle-database/18/dbseg/administering-the-audit-trail.html#GUID-3EFB75DB-AE1C-44E6-B46E-30E5702B0FC4)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleDatabaseAudit/Data%20Connectors/Connector_OracleDatabaseAudit.json","true" +"OracleWebLogicServer_CL","OracleWebLogicServer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleWebLogicServer","azuresentinel","azure-sentinel-solution-oracleweblogicserver","2022-01-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OracleWebLogicServer","Oracle","[Deprecated] Oracle WebLogic Server","OracleWebLogicServer data connector provides the capability to ingest [OracleWebLogicServer](https://docs.oracle.com/en/middleware/standalone/weblogic-server/index.html) events into Microsoft Sentinel. 
Refer to [OracleWebLogicServer documentation](https://docs.oracle.com/en/middleware/standalone/weblogic-server/14.1.1.0/index.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias OracleWebLogicServerEvent and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleWebLogicServer/Parsers/OracleWebLogicServerEvent.yaml). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Oracle WebLogic Server where the logs are generated.\n\n> Logs from Oracle WebLogic Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the 
machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Data**, select **Custom Logs** and click **Add+**\n3. Click **Browse** to upload a sample of a OracleWebLogicServer log file (e.g. server.log). Then, click **Next >**\n4. Select **New line** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to OracleWebLogicServer logs based on your configuration. Example: \n - **Linux** Directory: 'DOMAIN_HOME/servers/server_name/logs/*.log'\n - **Windows** Directory: 'DOMAIN_NAME\\servers\\SERVER_NAME\\logs\\*.log'\n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. 
Add **OracleWebLogicServer_CL** as the custom log Name and click **Done**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleWebLogicServer/Data%20Connectors/Connector_OracleWebLogicServer_agent.json","true" +"OrcaAlerts_CL","Orca Security Alerts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Orca%20Security%20Alerts","orcasecurityinc1621870991703","orca_security_alerts_mss","2022-05-10","","","Orca Security","Partner","https://orca.security/about/contact/","","domains","OrcaSecurityAlerts","Orca Security","Orca Security Alerts","The Orca Security Alerts connector allows you to easily export Alerts logs to Microsoft Sentinel.","[{""title"": """", ""description"": ""Follow [guidance](https://orcasecurity.zendesk.com/hc/en-us/articles/360043941992-Azure-Sentinel-configuration) for integrating Orca Security Alerts logs with Microsoft Sentinel."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Orca%20Security%20Alerts/Data%20Connectors/OrcaSecurityAlerts.json","true" +"","PCI DSS Compliance","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PCI%20DSS%20Compliance","azuresentinel","azure-sentinel-solution-pcidsscompliance","2022-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","PDNS Block Data Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PDNS%20Block%20Data%20Connector","azuresentinel","azure-sentinel-solution-pdnsblockdataconnector","2023-03-31","","","Nominet PDNS Support","Partner","https://www.protectivedns.service.ncsc.gov.uk/pdns","","domains","","","","","","","","false" +"CommonSecurityLog","Palo Alto - XDR (Cortex)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20-%20XDR%20%28Cortex%29","","","","","","","","","","","PaloAltoNetworksCortex","Palo Alto Networks","Palo Alto Networks Cortex XDR","The Palo Alto Networks Cortex XDR connector gives you an easy way to connect to your Cortex XDR logs with Microsoft Sentinel. This increases the visibility of your endpoint security. 
It will give you better ability to monitor your resources by creating custom Workbooks, analytics rules, Incident investigation, and evidence gathering.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Azure Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Azure Sentinel will use as the proxy between your security solution and Azure Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Azure Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Palo Alto Networks (Cortex) logs to Syslog agent"", ""description"": ""\n\n> 1. Go to [Cortex Settings and Configurations](https://inspira.xdr.in.paloaltonetworks.com/configuration/external-alerting) and Click to add New Server under External Applications.\n\n> 2. Then specify the name and Give public IP of your syslog server in Destination. \n\n> 3. 
Give Port number as 514 and from Facility field select FAC_SYSLOG from dropdown. \n\n> 4. Select Protocol as UDP and hit Create.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20-%20XDR%20%28Cortex%29/Data%20Connectors/Connector_PaloAlto_XDR_CEF.json","true" +"PaloAltoCortexXDR_Alerts_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. 
In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Audit_Agent_CL","Palo Alto 
Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. 
Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Audit_Management_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the 
Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Endpoints_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Incidents_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"CortexXpanseAlerts_CL","Palo Alto Cortex Xpanse CCF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20Xpanse%20CCF","azuresentinel","azure-sentinel-solution-cortexxpanse","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoExpanseCCPDefinition","Microsoft","Palo Alto Cortex Xpanse (via Codeless Connector Framework)","The Palo Alto Cortex Xpanse data connector ingests alerts data into Microsoft Sentinel.","[{""description"": ""To ingest data from Palo Alto Cortex Xpanse to Microsoft Sentinel, click on **Add Domain**. Fill in the required details in the pop-up and click Connect. You will see connected domain endpoints in the grid below. 
To get the Auth ID and API Key, go to **Settings \u2192 Configuration \u2192 Integrations \u2192 API Keys** in the Cortex Xpanse portal and generate new credentials."", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Domain Name"", ""placeholder"": ""e.g., example.crtx.us.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""domainName"", ""required"": true, ""description"": ""Enter the domain suffix to be used in the API endpoint, e.g., `example.crtx.us.paloaltonetworks.com`""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your Palo Alto Xpanse API Key"", ""type"": ""password"", ""name"": ""apiKey"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Xpanse Auth ID"", ""placeholder"": ""Enter your Xpanse Auth ID"", ""type"": ""text"", ""name"": ""xpanseAuthId"", ""required"": true}}]}]}}], ""title"": ""Connect Palo Alto Xpanse to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20Xpanse%20CCF/Data%20Connectors/CortexXpanse_ccp/CortexXpanse_ConnectorDefinition.json","true" +"PrismaCloudCompute_CL","Palo Alto Prisma Cloud 
CWPP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP","azuresentinel","azure-sentinel-solution-prismacloudcompute","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PaloAltoPrismaCloudCWPP","Microsoft","Palo Alto Prisma Cloud CWPP (using REST API)","The [Palo Alto Prisma Cloud CWPP](https://prisma.pan.dev/api/cloud/cwpp/audits/#operation/get-audits-incidents) data connector allows you to connect to your Palo Alto Prisma Cloud CWPP instance and ingesting alerts into Microsoft Sentinel. The data connector is built on Microsoft Sentinel's Codeless Connector Platform and uses the Prisma Cloud API to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""To enable the Palo Alto Prisma Cloud CWPP Security Events for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Path to console"", ""placeholder"": ""europe-west3.cloud.twistlock.com/{sasid}"", ""type"": ""text"", ""name"": ""domainname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Access Key (API)"", ""placeholder"": ""Prisma Access Key (API)"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""Secret"", ""type"": ""password"", ""name"": ""password""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect Palo Alto Prisma Cloud CWPP Security Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", 
""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""PrismaCloudCompute API Key"", ""description"": ""A Palo Alto Prisma Cloud CWPP Monitor API username and password is required. [See the documentation to learn more about PrismaCloudCompute SIEM API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/readme.md).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/PaloAltoPrismaCloudCWPP_ccp/connectorDefinition.json","true" +"PrismaCloudCompute_CL","Palo Alto Prisma Cloud CWPP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP","azuresentinel","azure-sentinel-solution-prismacloudcompute","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PrismaCloudComputeNativePoller","Microsoft","Palo Alto Prisma Cloud CWPP (using REST API)","The [Palo Alto Prisma Cloud CWPP](https://prisma.pan.dev/api/cloud/cwpp/audits/#operation/get-audits-incidents) data connector allows you to connect to your Prisma Cloud CWPP instance and ingesting alerts into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel’s Codeless Connector Platform and uses the Prisma Cloud API to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""To enable the Palo Alto Prisma Cloud CWPP Security Events for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Path to console"", ""placeholder"": ""https://europe-west3.cloud.twistlock.com/{sasid}"", ""type"": ""text"", ""name"": ""domainname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Access Key (API)"", ""placeholder"": ""Prisma Access Key (API)"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""Secret"", ""type"": ""password"", ""name"": ""password""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect Palo Alto Prisma Cloud CWPP Security Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""PrismaCloudCompute API Key"", ""description"": ""A Palo Alto Prisma Cloud CWPP Monitor API username and password is required. [See the documentation to learn more about PrismaCloudCompute SIEM API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/readme.md).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/PrismaCloudCompute_CLV2.json","true" +"CommonSecurityLog","PaloAlto-PAN-OS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS","azuresentinel","azure-sentinel-solution-paloaltopanos","2021-08-09","2021-09-20","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoNetworks","Palo Alto Networks","[Deprecated] Palo Alto Networks (Firewall) via Legacy Agent","The Palo Alto Networks firewall connector allows you to easily connect your Palo Alto Networks logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. 
Forward Palo Alto Networks logs to Syslog agent"", ""description"": ""Configure Palo Alto Networks to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nGo to [configure Palo Alto Networks NGFW for sending CEF events.](https://aka.ms/sentinel-paloaltonetworks-readme)\n\nGo to [Palo Alto CEF Configuration](https://aka.ms/asi-syslog-paloalto-forwarding) and Palo Alto [Configure Syslog Monitoring](https://aka.ms/asi-syslog-paloalto-configure) steps 2, 3, choose your version, and follow the instructions using the following guidelines:\n\n1. Set the Syslog server format to **BSD**.\n\n2. The copy/paste operations from the PDF might change the text and insert random characters. To avoid this, copy the text to an editor and remove any characters that might break the log format before pasting it.\n\n[Learn more >](https://aka.ms/CEFPaloAlto)""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS/Data%20Connectors/PaloAltoNetworks.json","true" +"CommonSecurityLog","PaloAlto-PAN-OS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS","azuresentinel","azure-sentinel-solution-paloaltopanos","2021-08-09","2021-09-20","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoNetworksAma","Palo Alto Networks","[Deprecated] Palo Alto Networks (Firewall) via AMA","The Palo Alto Networks firewall connector allows you to easily connect your Palo Alto Networks logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. 
Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Palo Alto Networks logs to Syslog agent"", ""description"": ""Configure Palo Alto Networks to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nGo to [configure Palo Alto Networks NGFW for sending CEF events.](https://aka.ms/sentinel-paloaltonetworks-readme)\n\nGo to [Palo Alto CEF Configuration](https://aka.ms/asi-syslog-paloalto-forwarding) and Palo Alto [Configure Syslog Monitoring](https://aka.ms/asi-syslog-paloalto-configure) steps 2, 3, choose your version, and follow the instructions using the following guidelines:\n\n1. Set the Syslog server format to **BSD**.\n\n2. The copy/paste operations from the PDF might change the text and insert random characters. To avoid this, copy the text to an editor and remove any characters that might break the log format before pasting it.\n\n[Learn more >](https://aka.ms/CEFPaloAlto)"", ""instructions"": []}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS/Data%20Connectors/template_PaloAltoNetworksAMA.json","true" +"CommonSecurityLog","PaloAltoCDL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL","azuresentinel","azure-sentinel-solution-paloaltocdl","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoCDL","Palo Alto Networks","[Deprecated] Palo Alto Networks Cortex Data Lake (CDL) via Legacy Agent","The [Palo Alto Networks CDL](https://www.paloaltonetworks.com/cortex/cortex-data-lake) data connector provides the capability to ingest [CDL logs](https://docs.paloaltonetworks.com/strata-logging-service/log-reference/log-forwarding-schema-overview) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoCDLEvent**](https://aka.ms/sentinel-paloaltocdl-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure Cortex Data Lake to forward logs to a Syslog Server using CEF"", ""description"": ""[Follow the instructions](https://docs.paloaltonetworks.com/cortex/cortex-data-lake/cortex-data-lake-getting-started/get-started-with-log-forwarding-app/forward-logs-from-logging-service-to-syslog-server.html) to configure logs forwarding from Cortex Data Lake to a Syslog Server.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL/Data%20Connectors/Connector_PaloAlto_CDL_CEF.json","true" +"CommonSecurityLog","PaloAltoCDL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL","azuresentinel","azure-sentinel-solution-paloaltocdl","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoCDLAma","Palo Alto Networks","[Deprecated] Palo Alto Networks Cortex Data Lake (CDL) via AMA","The [Palo Alto Networks CDL](https://www.paloaltonetworks.com/cortex/cortex-data-lake) data connector provides the capability to ingest [CDL logs](https://docs.paloaltonetworks.com/strata-logging-service/log-reference/log-forwarding-schema-overview) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoCDLEvent**](https://aka.ms/sentinel-paloaltocdl-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Configure Cortex Data Lake to forward logs to a Syslog Server using CEF"", ""description"": ""[Follow the instructions](https://docs.paloaltonetworks.com/cortex/cortex-data-lake/cortex-data-lake-getting-started/get-started-with-log-forwarding-app/forward-logs-from-logging-service-to-syslog-server.html) to configure logs forwarding from Cortex Data Lake to a Syslog Server."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL/Data%20Connectors/template_PaloAlto_CDLAMA.json","true" +"PaloAltoPrismaCloudAlert_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloud","Palo Alto","[DEPRECATED] Palo Alto Prisma Cloud CSPM","The Palo Alto Prisma Cloud CSPM data connector provides the capability to ingest [Prisma Cloud CSPM alerts](https://prisma.pan.dev/api/cloud/cspm/alerts#operation/get-alerts) and [audit logs](https://prisma.pan.dev/api/cloud/cspm/audit-logs#operation/rl-audit-logs) into Microsoft sentinel using the Prisma Cloud CSPM API. Refer to [Prisma Cloud CSPM API documentation](https://prisma.pan.dev/api/cloud/cspm) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Palo Alto Prisma Cloud REST API to pull logs into Microsoft sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoPrismaCloud**](https://aka.ms/sentinel-PaloAltoPrismaCloud-parser) which is deployed with the Microsoft sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Prisma Cloud**\n\nFollow the documentation to [create Prisma Cloud Access Key](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/create-access-keys.html) and [obtain Prisma Cloud API Url](https://api.docs.prismacloud.io/reference)\n\n NOTE: Please use SYSTEM ADMIN role for giving access to Prisma Cloud API because only SYSTEM ADMIN role is allowed to View Prisma Cloud Audit Logs. 
Refer to [Prisma Cloud Administrator Permissions (paloaltonetworks.com)](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/prisma-cloud-admin-permissions) for more details of administrator permissions.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Prisma Cloud data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Prisma Cloud API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Prisma Cloud data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-PaloAltoPrismaCloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Prisma Cloud API Url**, **Prisma Cloud Access Key ID**, **Prisma Cloud Secret Key**, **Microsoft sentinel Workspace Id**, **Microsoft sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Prisma Cloud data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-PaloAltoPrismaCloud-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tPrismaCloudAPIUrl\n\t\tPrismaCloudAccessKeyID\n\t\tPrismaCloudSecretKey\n\t\tAzureSentinelWorkspaceId\n\t\tAzureSentinelSharedKey\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Palo Alto Prisma Cloud API Credentials"", ""description"": ""**Prisma Cloud API Url**, **Prisma Cloud Access Key ID**, **Prisma Cloud Secret Key** are required for Prisma Cloud API connection. 
See the documentation to learn more about [creating Prisma Cloud Access Key](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/create-access-keys.html) and about [obtaining Prisma Cloud API Url](https://prisma.pan.dev/api/cloud/api-urls)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloud_API_FunctionApp.json","true" +"PaloAltoPrismaCloudAudit_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloud","Palo Alto","[DEPRECATED] Palo Alto Prisma Cloud CSPM","The Palo Alto Prisma Cloud CSPM data connector provides the capability to ingest [Prisma Cloud CSPM alerts](https://prisma.pan.dev/api/cloud/cspm/alerts#operation/get-alerts) and [audit logs](https://prisma.pan.dev/api/cloud/cspm/audit-logs#operation/rl-audit-logs) into Microsoft sentinel using the Prisma Cloud CSPM API. Refer to [Prisma Cloud CSPM API documentation](https://prisma.pan.dev/api/cloud/cspm) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Palo Alto Prisma Cloud REST API to pull logs into Microsoft sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoPrismaCloud**](https://aka.ms/sentinel-PaloAltoPrismaCloud-parser) which is deployed with the Microsoft sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Prisma Cloud**\n\nFollow the documentation to [create Prisma Cloud Access Key](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/create-access-keys.html) and [obtain Prisma Cloud API Url](https://api.docs.prismacloud.io/reference)\n\n NOTE: Please use SYSTEM ADMIN role for giving access to Prisma Cloud API because only SYSTEM ADMIN role is allowed to View Prisma Cloud Audit Logs. 
Refer to [Prisma Cloud Administrator Permissions (paloaltonetworks.com)](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/prisma-cloud-admin-permissions) for more details of administrator permissions.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Prisma Cloud data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Prisma Cloud API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Prisma Cloud data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-PaloAltoPrismaCloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Prisma Cloud API Url**, **Prisma Cloud Access Key ID**, **Prisma Cloud Secret Key**, **Microsoft sentinel Workspace Id**, **Microsoft sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Prisma Cloud data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-PaloAltoPrismaCloud-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tPrismaCloudAPIUrl\n\t\tPrismaCloudAccessKeyID\n\t\tPrismaCloudSecretKey\n\t\tAzureSentinelWorkspaceId\n\t\tAzureSentinelSharedKey\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Palo Alto Prisma Cloud API Credentials"", ""description"": ""**Prisma Cloud API Url**, **Prisma Cloud Access Key ID**, **Prisma Cloud Secret Key** are required for Prisma Cloud API connection. 
See the documentation to learn more about [creating Prisma Cloud Access Key](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/create-access-keys.html) and about [obtaining Prisma Cloud API Url](https://prisma.pan.dev/api/cloud/api-urls)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloud_API_FunctionApp.json","true" +"PaloAltoPrismaCloudAlertV2_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloudCSPMCCPDefinition","Microsoft","Palo Alto Prisma Cloud CSPM (via Codeless Connector Framework)","The Palo Alto Prisma Cloud CSPM data connector allows you to connect to your Palo Alto Prisma Cloud CSPM instance and ingesting Alerts (https://pan.dev/prisma-cloud/api/cspm/alerts/) & Audit Logs(https://pan.dev/prisma-cloud/api/cspm/audit-logs/) into Microsoft Sentinel.","[{""description"": ""To get more information on how to obtain the Prisma Cloud Access Key, Secret Key, and Base URL, please refer to the[connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/Readme.md), provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Access Key"", ""placeholder"": ""Enter Access Key"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Secret Key"", ""placeholder"": ""Enter Secret Key"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Base URL"", ""placeholder"": ""https://api2.eu.prismacloud.io"", ""type"": ""text"", ""name"": ""baseurl""}}, 
{""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""PaloAltoPrismaCloudCSPM Api Endpoints"", ""columnValue"": ""properties.request.apiEndpoint""}]}}], ""title"": ""Connect Palo Alto Prisma Cloud CSPM Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloudCSPMLog_CCF/PaloAltoPrismaCloudCSPMLog_ConnectorDefinition.json","true" +"PaloAltoPrismaCloudAuditV2_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloudCSPMCCPDefinition","Microsoft","Palo Alto Prisma Cloud CSPM (via Codeless Connector Framework)","The Palo Alto Prisma Cloud CSPM data connector allows you to connect to your Palo Alto Prisma Cloud CSPM instance and ingesting Alerts (https://pan.dev/prisma-cloud/api/cspm/alerts/) & Audit Logs(https://pan.dev/prisma-cloud/api/cspm/audit-logs/) into Microsoft 
Sentinel.","[{""description"": ""To get more information on how to obtain the Prisma Cloud Access Key, Secret Key, and Base URL, please refer to the[connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/Readme.md), provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Access Key"", ""placeholder"": ""Enter Access Key"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Secret Key"", ""placeholder"": ""Enter Secret Key"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Base URL"", ""placeholder"": ""https://api2.eu.prismacloud.io"", ""type"": ""text"", ""name"": ""baseurl""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""PaloAltoPrismaCloudCSPM Api Endpoints"", ""columnValue"": ""properties.request.apiEndpoint""}]}}], ""title"": ""Connect Palo Alto Prisma Cloud CSPM Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloudCSPMLog_CCF/PaloAltoPrismaCloudCSPMLog_ConnectorDefinition.json","true" +"ABAPAuditLog","Pathlock_TDnR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR","pathlockinc1631410274035","pathlock_tdnr","2022-02-17","","","Pathlock Inc.","Partner","https://pathlock.com/support/","","domains,verticals","Pathlock_TDnR","Pathlock Inc.","Pathlock Inc.: Threat Detection and Response for SAP","The [Pathlock Threat Detection and Response (TD&R)](https://pathlock.com/products/cybersecurity-application-controls/) integration with **Microsoft Sentinel Solution for SAP** delivers unified, real-time visibility into SAP security events, enabling organizations to detect and act on threats across all SAP landscapes. This out-of-the-box integration allows Security Operations Centers (SOCs) to correlate SAP-specific alerts with enterprise-wide telemetry, creating actionable intelligence that connects IT security with business processes.

Pathlock’s connector is purpose-built for SAP and forwards only **security-relevant events by default**, minimizing data volume and noise while maintaining the flexibility to forward all log sources when needed. Each event is enriched with **business process context**, allowing Microsoft Sentinel Solution for SAP analytics to distinguish operational patterns from real threats and to prioritize what truly matters.

This precision-driven approach helps security teams drastically reduce false positives, focus investigations, and accelerate **mean time to detect (MTTD)** and **mean time to respond (MTTR)**. Pathlock’s library consists of more than 1,500 SAP-specific detection signatures across 70+ log sources, the solution uncovers complex attack behaviors, configuration weaknesses, and access anomalies.

By combining business-context intelligence with advanced analytics, Pathlock enables enterprises to strengthen detection accuracy, streamline response actions, and maintain continuous control across their SAP environments—without adding complexity or redundant monitoring layers.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""Pathlock Inc. Threat Detection and Response for SAP""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. 
Maintain the data collection endpoint details and authentication info in your central instance of Pathlock's Cybersecurity Application Controls: Threat Detection and Response"", ""description"": ""Share the data collection endpoint URL and authentication info with the Pathlock administrator to configure the plug and play forwarding in Threat Detection and Response to send data to the data collection endpoint.\nPlease do not hesitate to contact Pathlock if support is needed.\n\n"", ""instructions"": [{""parameters"": {""label"": ""Use this value to configure as Tenant ID in the LogIngestionAPI credential."", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Use this value to configure the LogsIngestionURL parameter when deploying the IFlow."", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR/Data%20Connectors/Pathlock_TDnR_PUSH_CCP/Pathlock_TDnR_connectorDefinition.json","true" +"Pathlock_TDnR_CL","Pathlock_TDnR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR","pathlockinc1631410274035","pathlock_tdnr","2022-02-17","","","Pathlock Inc.","Partner","https://pathlock.com/support/","","domains,verticals","Pathlock_TDnR","Pathlock Inc.","Pathlock Threat Detection and Response Integration","Pathlock Threat Detection and Response enables seamless forwarding of security alerts and logs detected and collected by the Pathlock Platform into Microsoft Sentinel Solution for SAP.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. 
This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""Pathlock Threat Detection and Response forwarding to Microsoft Sentinel Solution for SAP""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. Maintain the data collection endpoint details and authentication info in Pathlock Threat Detection and Response"", ""description"": ""Share the data collection endpoint URL and authentication info with the Pathlock Threat Detection and Response Integration administrator to configure the Integration."", ""instructions"": [{""parameters"": {""label"": ""Use this value to configure as Tenant ID in the LogIngestionAPI credential."", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Use this value to configure the LogsIngestionURL parameter."", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": 
""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR/Data%20Connectors/Pathlock_TDnR.json","true" +"Perimeter81_CL","Perimeter 81","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Perimeter%2081","perimeter811605117499319","perimeter_81___mss","2022-05-06","","","Perimeter 81","Partner","https://support.perimeter81.com/docs","","domains","Perimeter81ActivityLogs","Perimeter 81","Perimeter 81 Activity Logs","The Perimeter 81 Activity Logs connector allows you to easily connect your Perimeter 81 activity logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation.","[{""title"": """", ""description"": ""Please note the values below and follow the instructions here to connect your Perimeter 81 activity logs with Microsoft Sentinel."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, 
""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Perimeter%2081/Data%20Connectors/Perimeter81ActivityLogs.json","true" +"Phosphorus_CL","Phosphorus","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Phosphorus","4043","microsoft-sentinel-solution-phosphorus","2024-08-13","2024-08-13","","Phosphorus Inc.","Partner","https://phosphorus.io","","domains","Phosphorus_Polling","Phosphorus Inc.","Phosphorus Devices","The Phosphorus Device Connector provides the capability to Phosphorus to ingest device data logs into Microsoft Sentinel through the Phosphorus REST API. The Connector provides visibility into the devices enrolled in Phosphorus. This Data Connector pulls devices information along with its corresponding alerts.","[{""description"": ""**STEP 1 - Configuration steps for the Phosphorus API**\n\n Follow these instructions to create a Phosphorus API key.\n 1. Log into your Phosphorus instance\n 2. Navigate to Settings -> API \n 3. If the API key has not already been created, press the **Add button** to create the API key\n 4. 
The API key can now be copied and used during the Phosphorus Device connector configuration""}, {""title"": ""Connect the Phosphorus Application with Microsoft Sentinel"", ""description"": ""**STEP 2 - Fill in the details below**\n\n>**IMPORTANT:** Before deploying the Phosphorus Device data connector, have the Phosphorus Instance Domain Name readily available as well as the Phosphorus API Key(s)"", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Domain Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{urlPlaceHolder}}"", ""placeHolderValue"": """"}, {""displayText"": ""Integration Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{integrationName}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""REST API Credentials/permissions"", ""description"": ""**Phosphorus API Key** is required. Please make sure that the API Key associated with the User has the Manage Settings permissions enabled.\n\n Follow these instructions to enable Manage Settings permissions.\n 1. Log in to the Phosphorus Application\n 2. Go to 'Settings' -> 'Groups'\n 3. Select the Group the Integration user is a part of\n 4. Navigate to 'Product Actions' -> toggle on the 'Manage Settings' permission. 
""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Phosphorus/Data%20Connectors/PhosphorusDataConnector.json","true" +"CommonSecurityLog","PingFederate","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate","azuresentinel","azure-sentinel-solution-pingfederate","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PingFederate","Ping Identity","[Deprecated] PingFederate via Legacy Agent","The [PingFederate](https://www.pingidentity.com/en/software/pingfederate.html) data connector provides the capability to ingest [PingFederate events](https://docs.pingidentity.com/bundle/pingfederate-102/page/lly1564002980532.html) into Microsoft Sentinel. Refer to [PingFederate documentation](https://docs.pingidentity.com/bundle/pingfederate-102/page/tle1564002955874.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PingFederateEvent**](https://aka.ms/sentinel-PingFederate-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://docs.pingidentity.com/bundle/pingfederate-102/page/gsn1564002980953.html) to configure PingFederate sending audit log via syslog in CEF format.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate/Data%20Connectors/Connector_CEF_PingFederate.json","true" +"CommonSecurityLog","PingFederate","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate","azuresentinel","azure-sentinel-solution-pingfederate","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PingFederateAma","Ping Identity","[Deprecated] PingFederate via AMA","The [PingFederate](https://www.pingidentity.com/en/software/pingfederate.html) data connector provides the capability to ingest [PingFederate events](https://docs.pingidentity.com/bundle/pingfederate-102/page/lly1564002980532.html) into Microsoft Sentinel. Refer to [PingFederate documentation](https://docs.pingidentity.com/bundle/pingfederate-102/page/tle1564002955874.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PingFederateEvent**](https://aka.ms/sentinel-PingFederate-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://docs.pingidentity.com/bundle/pingfederate-102/page/gsn1564002980953.html) to configure PingFederate sending audit log via syslog in CEF format."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate/Data%20Connectors/template_PingFederateAMA.json","true" +"PingOne_AuditActivitiesV2_CL","PingOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingOne","azuresentinel","azure-sentinel-pingone","2025-04-20","2025-04-20","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PingOneAuditLogsCCPDefinition","Microsoft","Ping One (via Codeless Connector Framework)","This connector ingests **audit activity logs** from the PingOne Identity platform into Microsoft Sentinel using a Codeless Connector Framework.","[{""title"": ""Connect Ping One connector to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""Before connecting to PingOne, ensure the following prerequisites are completed. Refer to the [document](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingOne/README.md) for detailed setup instructions, including how to obtain client credentials and the environment ID.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Client Credentials \n You'll need client credentials, including your client id and client secret.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Environment Id \n To generate token and gather logs from audit activities endpoint""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Environment ID"", ""columnValue"": ""properties.addOnAttributes.EnvironmentId""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Enter ID of the client"", ""type"": ""text"", ""name"": ""clientId"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Enter your secret key"", ""type"": ""password"", ""name"": ""clientSecret"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Environment ID"", ""placeholder"": ""Enter your environment Id "", ""type"": ""text"", ""name"": ""environmentId"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Api domain"", ""placeholder"": ""Enter your Api domain Eg.( pingone.com,pingone.eu etc )depending on the region credentials created for "", ""type"": ""text"", ""name"": ""apidomain"", ""required"": true}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": false, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingOne/Data%20Connectors/PingOneAuditLogs_ccp/PingOneAuditLogs_DataConnectorDefinition.json","true" 
+"PostgreSQL_CL","PostgreSQL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PostgreSQL","azuresentinel","azure-sentinel-solution-postgresql","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PostgreSQL","PostgreSQL","[Deprecated] PostgreSQL Events","PostgreSQL data connector provides the capability to ingest [PostgreSQL](https://www.postgresql.org/) events into Microsoft Sentinel. Refer to [PostgreSQL documentation](https://www.postgresql.org/docs/current/index.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on PostgreSQL parser based on a Kusto Function to work as expected. This parser is installed along with solution installation."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Tomcat Server where the logs are generated.\n\n> Logs from PostgreSQL Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the 
machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure PostgreSQL to write logs to files"", ""description"": ""1. Edit postgresql.conf file to write logs to files:\n\n>**log_destination** = 'stderr'\n\n>**logging_collector** = on\n\nSet the following parameters: **log_directory** and **log_filename**. Refer to the [PostgreSQL documentation for more details](https://www.postgresql.org/docs/current/runtime-config-logging.html)""}, {""title"": ""3. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Settings**, select **Custom Logs** and click **+Add custom log**\n3. Click **Browse** to upload a sample of a PostgreSQL log file. Then, click **Next >**\n4. Select **Timestamp** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to PostgreSQL logs based on your configuration(e.g. for some Linux distros the default path is /var/log/postgresql/) \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. 
Add **PostgreSQL** as the custom log Name (the '_CL' suffix will be added automatically) and click **Done**.""}, {""title"": ""Validate connectivity"", ""description"": ""It may take upwards of 20 minutes until your logs start to appear in Microsoft Sentinel.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PostgreSQL/Data%20Connectors/Connector_PostgreSQL.json","true" +"","Power Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Power%20Platform","","","","","","","","","","","","","","","","","","false" +"prancer_CL","Prancer PenSuiteAI Integration","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Prancer%20PenSuiteAI%20Integration","prancerenterprise1600813133757","microsoft-sentinel-solution-prancer","2023-08-02","","","Prancer PenSuiteAI Integration","Partner","https://www.prancer.io","","domains","PrancerLogData","Prancer","Prancer Data Connector","The Prancer Data Connector has provides the capability to ingest Prancer (CSPM)[https://docs.prancer.io/web/CSPM/] and [PAC](https://docs.prancer.io/web/PAC/introduction/) data to process through Microsoft Sentinel. 
Refer to [Prancer Documentation](https://docs.prancer.io/web) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Prancer REST API to pull logs into Microsoft sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""STEP 1: Follow the documentation on the [Prancer Documentation Site](https://docs.prancer.io/web/) in order to set up an scan with an azure cloud connector.""}, {""title"": """", ""description"": ""STEP 2: Once the scan is created go to the 'Third Part Integrations' menu for the scan and select Sentinel.""}, {""title"": """", ""description"": ""STEP 3: Create follow the configuration wizard to select where in Azure the results should be sent to.""}, {""title"": """", ""description"": ""STEP 4: Data should start to get fed into Microsoft Sentinel for processing.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Include custom pre-requisites if the connectivity requires - else delete customs"", ""description"": ""Description for any custom pre-requisite""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Prancer%20PenSuiteAI%20Integration/Data%20Connectors/PrancerLogData.json","true" +"ProofPointTAPClicksBlocked_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Proofpoint TAP to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint TAP API**\n\n1. Log into the Proofpoint TAP console \n2. Navigate to **Connect Applications** and select **Service Principal**\n3. Create a **Service Principal** (API Authorization Key)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint TAP connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint TAP API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint TAP connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelproofpointtapazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelproofpointtapazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, and validate the **Uri**.\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Proofpoint TAP connector manually with Azure Function (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinelproofpointtapazurefunctionzip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following six (6) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapipassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n> - Set the `uri` value to: `https://tap-api-v2.proofpoint.com/v2/siem/all?format=json&sinceSeconds=300`\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API username and password is required. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" +"ProofPointTAPClicksPermitted_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Proofpoint TAP to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint TAP API**\n\n1. Log into the Proofpoint TAP console \n2. Navigate to **Connect Applications** and select **Service Principal**\n3. Create a **Service Principal** (API Authorization Key)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint TAP connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint TAP API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint TAP connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelproofpointtapazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelproofpointtapazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, and validate the **Uri**.\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Proofpoint TAP connector manually with Azure Function (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinelproofpointtapazurefunctionzip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following six (6) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapipassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n> - Set the `uri` value to: `https://tap-api-v2.proofpoint.com/v2/siem/all?format=json&sinceSeconds=300`\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API username and password is required. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" +"ProofPointTAPMessagesBlocked_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Proofpoint TAP to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint TAP API**\n\n1. Log into the Proofpoint TAP console \n2. Navigate to **Connect Applications** and select **Service Principal**\n3. Create a **Service Principal** (API Authorization Key)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint TAP connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint TAP API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint TAP connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelproofpointtapazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelproofpointtapazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, and validate the **Uri**.\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Proofpoint TAP connector manually with Azure Function (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinelproofpointtapazurefunctionzip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following six (6) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapipassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n> - Set the `uri` value to: `https://tap-api-v2.proofpoint.com/v2/siem/all?format=json&sinceSeconds=300`\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API username and password is required. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" +"ProofPointTAPMessagesDelivered_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Proofpoint TAP to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint TAP API**\n\n1. Log into the Proofpoint TAP console \n2. Navigate to **Connect Applications** and select **Service Principal**\n3. Create a **Service Principal** (API Authorization Key)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint TAP connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint TAP API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint TAP connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelproofpointtapazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelproofpointtapazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, and validate the **Uri**.\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Proofpoint TAP connector manually with Azure Function (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinelproofpointtapazurefunctionzip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following six (6) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapipassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n> - Set the `uri` value to: `https://tap-api-v2.proofpoint.com/v2/siem/all?format=json&sinceSeconds=300`\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API username and password is required. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" +"ProofPointTAPClicksBlockedV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""description"": ""**Configuration steps for the Proofpoint TAP API**\n\n1. Log into the [Proofpoint TAP dashboard](https://threatinsight.proofpoint.com/) \n2. Navigate to **Settings** and go to **Connected Applications** tab \n 3. Click on **Create New Credential** \n 4. Provide a name and click **Generate** \n 5. 
Copy **Service Principal** and **Secret** values""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**ProofpointTAPEvent**](https://aka.ms/sentinel-ProofpointTAPDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Service Principal"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""123456"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""connectionToggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API service principal and secret is required to access Proofpoint's SIEM API. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" +"ProofPointTAPClicksPermittedV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""description"": ""**Configuration steps for the Proofpoint TAP API**\n\n1. Log into the [Proofpoint TAP dashboard](https://threatinsight.proofpoint.com/) \n2. Navigate to **Settings** and go to **Connected Applications** tab \n 3. Click on **Create New Credential** \n 4. Provide a name and click **Generate** \n 5. 
Copy **Service Principal** and **Secret** values""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**ProofpointTAPEvent**](https://aka.ms/sentinel-ProofpointTAPDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Service Principal"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""123456"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""connectionToggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API service principal and secret is required to access Proofpoint's SIEM API. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" +"ProofPointTAPMessagesBlockedV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""description"": ""**Configuration steps for the Proofpoint TAP API**\n\n1. Log into the [Proofpoint TAP dashboard](https://threatinsight.proofpoint.com/) \n2. Navigate to **Settings** and go to **Connected Applications** tab \n 3. Click on **Create New Credential** \n 4. Provide a name and click **Generate** \n 5. 
Copy **Service Principal** and **Secret** values""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**ProofpointTAPEvent**](https://aka.ms/sentinel-ProofpointTAPDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Service Principal"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""123456"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""connectionToggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API service principal and secret is required to access Proofpoint's SIEM API. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" +"ProofPointTAPMessagesDeliveredV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""description"": ""**Configuration steps for the Proofpoint TAP API**\n\n1. Log into the [Proofpoint TAP dashboard](https://threatinsight.proofpoint.com/) \n2. Navigate to **Settings** and go to **Connected Applications** tab \n 3. Click on **Create New Credential** \n 4. Provide a name and click **Generate** \n 5. 
Copy **Service Principal** and **Secret** values""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**ProofpointTAPEvent**](https://aka.ms/sentinel-ProofpointTAPDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Service Principal"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""123456"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""connectionToggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API service principal and secret is required to access Proofpoint's SIEM API. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" +"ProofpointPODMailLog_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointCCPDefinition","Proofpoint","Proofpoint On Demand Email Security (via Codeless Connector Platform)","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Proofpoint POD Websocket API \n #### The PoD Log API does not allow use of the same token for more than one session at the same time, so make sure your token isn't used anywhere. \n Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n You must provide your cluster id and security token.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve the cluster id\n 1.1. Log in to the [proofpoint](https://admin.proofpoint.com/) [**Management Console**] with Admin user credentials\n\n 1.2. 
In the **Management Console**, the cluster id is displayed in the upper-right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve the API token\n 2.1. Log in to the [proofpoint](https://admin.proofpoint.com/) [**Management Console**] with Admin user credentials\n\n 2.2. In the **Management Console**, click **Settings** -> **API Key Management** \n\n 2.3. Under **API Key Management** click on the **PoD Logging** tab.\n\n 2.4. Get or create a new API key.""}}, {""parameters"": {""label"": ""Cluster Id"", ""placeholder"": ""cluster_id"", ""type"": ""text"", ""name"": ""clusterId""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""text"", ""name"": ""apiKey""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, and **ProofpointToken** are required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofPointEmailSecurity_CCP/ProofpointPOD_Definaton.json","true" +"ProofpointPODMessage_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointCCPDefinition","Proofpoint","Proofpoint On Demand Email Security (via Codeless Connector Platform)","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Proofpoint POD Websocket API \n #### The PoD Log API does not allow use of the same token for more than one session at the same time, so make sure your token isn't used anywhere. \n Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n You must provide your cluster id and security token.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve the cluster id\n 1.1. 
Log in to the [proofpoint](https://admin.proofpoint.com/) [**Management Console**] with Admin user credentials\n\n 1.2. In the **Management Console**, the cluster id is displayed in the upper-right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve the API token\n 2.1. Log in to the [proofpoint](https://admin.proofpoint.com/) [**Management Console**] with Admin user credentials\n\n 2.2. In the **Management Console**, click **Settings** -> **API Key Management** \n\n 2.3. Under **API Key Management** click on the **PoD Logging** tab.\n\n 2.4. Get or create a new API key.""}}, {""parameters"": {""label"": ""Cluster Id"", ""placeholder"": ""cluster_id"", ""type"": ""text"", ""name"": ""clusterId""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""text"", ""name"": ""apiKey""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, and **ProofpointToken** are required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofPointEmailSecurity_CCP/ProofpointPOD_Definaton.json","true" +"ProofpointPODMessage_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint Websocket API**\n\n1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n2. 
You must provide your cluster id and security token.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tProofpointClusterID\n\t\tProofpointToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, **ProofpointToken** is required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" +"ProofpointPOD_maillog_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint Websocket API**\n\n1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n2. 
You must provide your cluster id and security token.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tProofpointClusterID\n\t\tProofpointToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, **ProofpointToken** is required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" +"ProofpointPOD_message_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint Websocket API**\n\n1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n2. 
You must provide your cluster id and security token.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tProofpointClusterID\n\t\tProofpointToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, **ProofpointToken** is required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" +"maillog_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint Websocket API**\n\n1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n2. 
You must provide your cluster id and security token.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tProofpointClusterID\n\t\tProofpointToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, **ProofpointToken** is required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" +"Syslog","Pulse Connect Secure","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pulse%20Connect%20Secure","azuresentinel","azure-sentinel-solution-pulseconnectsecure","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PulseConnectSecure","Pulse Secure","[Deprecated] Pulse Connect Secure","The [Pulse Connect Secure](https://www.pulsesecure.net/products/pulse-connect-secure/) connector allows you to easily connect your Pulse Connect Secure logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Pulse Connect Secure with Microsoft Sentinel provides more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Pulse Connect Secure and load the function code or click [here](https://aka.ms/sentinel-PulseConnectSecure-parser), on the second line of the query, enter the hostname(s) of your Pulse Connect Secure device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the Pulse Connect Secure"", ""description"": ""[Follow the instructions](https://help.ivanti.com/ps/help/en_US/PPS/9.1R13/ag/configuring_an_external_syslog_server.htm) to enable syslog streaming of Pulse Connect Secure logs. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Pulse Connect Secure"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pulse%20Connect%20Secure/Data%20Connectors/Connector_Syslog_PulseConnectSecure.json","true" +"","Pure Storage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pure%20Storage","purestoragemarketplaceadmin","microsoft-sentinel-solution-purestorage","2024-02-05","","","purestoragemarketplaceadmin","Partner","https://support.purestorage.com","","domains","","","","","","","","false" +"QualysKB_CL","Qualys VM Knowledgebase","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Qualys%20VM%20Knowledgebase","azuresentinel","azure-sentinel-solution-qualysvmknowledgebase","2022-05-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","QualysKB","Qualys","Qualys VM KnowledgeBase","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) KnowledgeBase (KB) connector provides the capability to ingest the latest vulnerability data from the Qualys KB into Microsoft Sentinel.

This data can used to correlate and enrich vulnerability detections found by the [Qualys Vulnerability Management (VM)](https://docs.microsoft.com/azure/sentinel/connect-qualys-vm) data connector.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias QualysVM Knowledgebase and load the function code or click [here](https://aka.ms/sentinel-crowdstrikefalconendpointprotection-parser), on the second line of the query, enter the hostname(s) of your QualysVM Knowledgebase device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow the steps](https://aka.ms/sentinel-qualyskb-parser) to use the Kusto function alias, **QualysKB**""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Qualys API**\n\n1. Log into the Qualys Vulnerability Management console with an administrator account, select the **Users** tab and the **Users** subtab. \n2. Click on the **New** drop-down menu and select **Users**.\n3. Create a username and password for the API account. \n4. In the **User Roles** tab, ensure the account role is set to **Manager** and access is allowed to **GUI** and **API**\n4. 
Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. \n5. Log back into the console using an administrator account and modify the API accounts User Roles, removing access to **GUI**. \n6. Save all changes.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Qualys KB connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Qualys API username and password, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Qualys KB connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-qualyskb-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-qualyskb-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , update the **URI**, and any additional URI **Filter Parameters** (This value should include a \""&\"" symbol between each parameter and should not include any spaces) \n> - Enter the URI that corresponds to your region. 
The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348)\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n - Note: If deployment failed due to the storage account name being taken, change the **Function Name** to a unique value and redeploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Qualys KB connector manually with Azure Function."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-qualyskb-functioncode) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following seven (7) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapiPassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tfilterParameters\n\t\tlogAnalyticsUri (optional)\n> - Enter the URI that corresponds to your region. The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348). The `uri` value must follow the following schema: `https:///api/2.0` \n> - Add any additional filter parameters, for the `filterParameters` variable, that need to be appended to the URI. The `filterParameter` value should include a \""&\"" symbol between each parameter and should not include any spaces.\n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for delegated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Qualys API Key"", ""description"": ""A Qualys VM API username and password is required. [See the documentation to learn more about Qualys VM API](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Qualys%20VM%20Knowledgebase/Data%20Connectors/QualysKB_API_FunctionApp.json","true" +"QualysHostDetectionV3_CL","QualysVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM","azuresentinel","azure-sentinel-qualysvm","2020-12-14","2025-11-18","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","QualysVMLogsCCPDefinition","Microsoft","Qualys Vulnerability Management (via Codeless Connector Framework)","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) data connector provides the capability to ingest vulnerability host detection data into Microsoft Sentinel through the Qualys API. 
The connector provides visibility into host detection data from vulerability scans.","[{""title"": ""Connect Qualys Vulnerability Management to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To gather data for Detections based on Host, expand the **DetectionList** column in the table.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Qualys VM, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. API Credentials \n To gather data from Qualys VM, you'll need Qualys API credentials, including your Username and Password.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. API Server URL \n To gather data from Qualys VM, you'll need the Qualys API server URL specific to your region. You can find the exact API server URL for your region [here](https://www.qualys.com/platform-identification/#api-urls)""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Qualys API User Name"", ""placeholder"": ""Enter UserName"", ""type"": ""text"", ""name"": ""username"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Qualys API Password"", ""placeholder"": ""Enter password"", ""type"": ""password"", ""name"": ""password"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Qualys API Server URL"", ""placeholder"": ""Enter API Server URL"", ""type"": ""text"", ""name"": ""apiServerUrl"", ""required"": true, ""description"": ""Ensure the API Server URL starts with https:// and paste the whole API Server URL without / at the ending""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Truncation Limit \n Configure the maximum number of host records to retrieve per API call (20-5000 range). 
Higher values may improve performance but could impact API response times.""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Truncation Limit"", ""name"": ""truncationLimit"", ""options"": [{""key"": ""1000"", ""text"": ""1000 - API default value""}, {""key"": ""20"", ""text"": ""20 - Minimal load, slower collection""}, {""key"": ""100"", ""text"": ""100 - Low load""}, {""key"": ""500"", ""text"": ""500 - Moderate load""}, {""key"": ""2500"", ""text"": ""2500 - High load, faster collection""}, {""key"": ""5000"", ""text"": ""5000 - Maximum load, fastest collection""}], ""placeholder"": ""Select truncation limit"", ""isMultiSelect"": false, ""required"": true}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""API access and roles"", ""description"": ""Ensure the Qualys VM user has a role of Reader or higher. If the role is Reader, ensure that API access is enabled for the account. Auditor role is not supported to access the API. 
For more details, refer to the Qualys VM [Host Detection API](https://docs.qualys.com/en/vm/qweb-all-api/mergedProjects/qapi-assets/host_lists/host_detection.htm#v_3_0) and [User role Comparison](https://qualysguard.qualys.com/qwebhelp/fo_portal/user_accounts/user_roles_comparison_vm.htm) document.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM/Data%20Connectors/QualysVMHostLogs_ccp/QualysVMHostLogs_ConnectorDefinition.json","true" +"QualysHostDetectionV2_CL","QualysVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM","azuresentinel","azure-sentinel-qualysvm","2020-12-14","2025-11-18","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","QualysVulnerabilityManagement","Qualys","[DEPRECATED] Qualys Vulnerability Management","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) data connector provides the capability to ingest vulnerability host detection data into Microsoft Sentinel through the Qualys API. The connector provides visibility into host detection data from vulerability scans. This connector provides Microsoft Sentinel the capability to view dashboards, create custom alerts, and improve investigation

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Qualys VM to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Qualys VM API**\n\n1. Log into the Qualys Vulnerability Management console with an administrator account, select the **Users** tab and the **Users** subtab. \n2. Click on the **New** drop-down menu and select **Users..**\n3. Create a username and password for the API account. \n4. In the **User Roles** tab, ensure the account role is set to **Manager** and access is allowed to **GUI** and **API**\n4. Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. \n5. Log back into the console using an administrator account and modify the API accounts User Roles, removing access to **GUI**. \n6. 
Save all changes.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Qualys VM connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Qualys VM API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated, if you have previously deployed an earlier version, and want to update, please delete the existing Qualys VM Azure Function before redeploying this version. Please use Qualys V2 version Workbook, detections. ""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Qualys VM connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , update the **URI**, and any additional URI **Filter Parameters** (each filter should be separated by an \""&\"" symbol, no spaces.) \n> - Enter the URI that corresponds to your region. 
The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348) -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format. \n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Quayls VM connector manually with Azure Functions.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ New Function**.\n2. Select **Timer Trigger**.\n3. 
Enter a unique Function **Name** and leave the default cron schedule of every 5 minutes, then click **Create**.\n5. Click on **Code + Test** on the left pane. \n6. Copy the [Function App Code](https://aka.ms/sentinel-QualysVM-functioncodeV2) and paste into the Function App `run.ps1` editor.\n7. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following eight (8) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapiPassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tfilterParameters\n\t\ttimeInterval\n\t\tlogAnalyticsUri (optional)\n> - Enter the URI that corresponds to your region. The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348). The `uri` value must follow the following schema: `https:///api/2.0/fo/asset/host/vm/detection/?action=list&vm_processed_after=` -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.\n> - Add any additional filter parameters, for the `filterParameters` variable, that need to be appended to the URI. Each parameter should be seperated by an \""&\"" symbol and should not include any spaces.\n> - Set the `timeInterval` (in minutes) to the value of `5` to correspond to the Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**4. Configure the host.json**.\n\nDue to the potentially large amount of Qualys host detection data being ingested, it can cause the execution time to surpass the default Function App timeout of five (5) minutes. Increase the default timeout duration to the maximum of ten (10) minutes, under the Consumption Plan, to allow more time for the Function App to execute.\n\n1. In the Function App, select the Function App Name and select the **App Service Editor** blade.\n2. Click **Go** to open the editor, then select the **host.json** file under the **wwwroot** directory.\n3. Add the line `\""functionTimeout\"": \""00:10:00\"",` above the `managedDependancy` line \n4. Ensure **SAVED** appears on the top right corner of the editor, then exit the editor.\n\n> NOTE: If a longer timeout duration is required, consider upgrading to an [App Service Plan](https://docs.microsoft.com/azure/azure-functions/functions-scale#timeout)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Qualys API Key"", ""description"": ""A Qualys VM API username and password is required. [See the documentation to learn more about Qualys VM API](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM/Data%20Connectors/QualysVM_API_FunctionApp.json","true" +"QualysHostDetection_CL","QualysVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM","azuresentinel","azure-sentinel-qualysvm","2020-12-14","2025-11-18","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","QualysVulnerabilityManagement","Qualys","[DEPRECATED] Qualys Vulnerability Management","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) data connector provides the capability to ingest vulnerability host detection data into Microsoft Sentinel through the Qualys API. The connector provides visibility into host detection data from vulerability scans. This connector provides Microsoft Sentinel the capability to view dashboards, create custom alerts, and improve investigation

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Qualys VM to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Qualys VM API**\n\n1. Log into the Qualys Vulnerability Management console with an administrator account, select the **Users** tab and the **Users** subtab. \n2. Click on the **New** drop-down menu and select **Users..**\n3. Create a username and password for the API account. \n4. In the **User Roles** tab, ensure the account role is set to **Manager** and access is allowed to **GUI** and **API**\n4. Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. \n5. Log back into the console using an administrator account and modify the API accounts User Roles, removing access to **GUI**. \n6. 
Save all changes.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Qualys VM connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Qualys VM API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated, if you have previously deployed an earlier version, and want to update, please delete the existing Qualys VM Azure Function before redeploying this version. Please use Qualys V2 version Workbook, detections. ""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Qualys VM connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , update the **URI**, and any additional URI **Filter Parameters** (each filter should be separated by an \""&\"" symbol, no spaces.) \n> - Enter the URI that corresponds to your region. 
The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348) -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format. \n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Quayls VM connector manually with Azure Functions.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ New Function**.\n2. Select **Timer Trigger**.\n3. 
Enter a unique Function **Name** and leave the default cron schedule of every 5 minutes, then click **Create**.\n5. Click on **Code + Test** on the left pane. \n6. Copy the [Function App Code](https://aka.ms/sentinel-QualysVM-functioncodeV2) and paste into the Function App `run.ps1` editor.\n7. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following eight (8) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapiPassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tfilterParameters\n\t\ttimeInterval\n\t\tlogAnalyticsUri (optional)\n> - Enter the URI that corresponds to your region. The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348). The `uri` value must follow the following schema: `https:///api/2.0/fo/asset/host/vm/detection/?action=list&vm_processed_after=` -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.\n> - Add any additional filter parameters, for the `filterParameters` variable, that need to be appended to the URI. Each parameter should be seperated by an \""&\"" symbol and should not include any spaces.\n> - Set the `timeInterval` (in minutes) to the value of `5` to correspond to the Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**4. Configure the host.json**.\n\nDue to the potentially large amount of Qualys host detection data being ingested, it can cause the execution time to surpass the default Function App timeout of five (5) minutes. Increase the default timeout duration to the maximum of ten (10) minutes, under the Consumption Plan, to allow more time for the Function App to execute.\n\n1. In the Function App, select the Function App Name and select the **App Service Editor** blade.\n2. Click **Go** to open the editor, then select the **host.json** file under the **wwwroot** directory.\n3. Add the line `\""functionTimeout\"": \""00:10:00\"",` above the `managedDependancy` line \n4. Ensure **SAVED** appears on the top right corner of the editor, then exit the editor.\n\n> NOTE: If a longer timeout duration is required, consider upgrading to an [App Service Plan](https://docs.microsoft.com/azure/azure-functions/functions-scale#timeout)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Qualys API Key"", ""description"": ""A Qualys VM API username and password is required. [See the documentation to learn more about Qualys VM API](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM/Data%20Connectors/QualysVM_API_FunctionApp.json","true" +"QscoutAppEvents_CL","Quokka","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Quokka","quokka","azure-sentinel-solution-quokka","2025-10-30","","","Quokka","Partner","https://www.quokka.io/contact-us#customer-support","","domains","QscoutAppEventsCCFDefinition","Quokka","QscoutAppEventsConnector","Ingest Qscout application events into Microsoft Sentinel","[{""description"": "">**NOTE:** This connector uses Codeless Connector Framework (CCF) to connect to the Qscout app events feed and ingest data into Microsoft Sentinel""}, {""description"": ""Provide the required values below:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Qscout Organization ID"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""organizationId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Qscout Organization API Key"", ""placeholder"": ""abcdxyz"", ""type"": ""text"", ""name"": ""apiKey""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": 
[{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required"", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true, ""read"": true}}], ""customs"": [{""name"": ""Qscout organization id"", ""description"": ""The API requires your organization ID in Qscout.""}, {""name"": ""Qscout organization API key"", ""description"": ""The API requires your organization API key in Qscout.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Quokka/Data%20Connectors/QuokkaQscoutAppEventsLogs_ccf/QuokkaQscoutAppEventsLogs_connectorDefinition.json","true" +"Syslog","RSA SecurID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSA%20SecurID","azuresentinel","azure-sentinel-solution-securid","2021-09-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","RSASecurIDAM","RSA","[Deprecated] RSA® SecurID (Authentication Manager)","The [RSA® SecurID Authentication Manager](https://www.securid.com/) data connector provides the capability to ingest [RSA® SecurID Authentication Manager events](https://community.rsa.com/t5/rsa-authentication-manager/rsa-authentication-manager-log-messages/ta-p/630160) into Microsoft Sentinel. 
Refer to [RSA® SecurID Authentication Manager documentation](https://community.rsa.com/t5/rsa-authentication-manager/getting-started-with-rsa-authentication-manager/ta-p/569582) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**RSASecurIDAMEvent**](https://aka.ms/sentinel-rsasecuridam-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using RSA SecurID Authentication Manager version: 8.4 and 8.5"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the RSA\u00ae SecurID Authentication Manager logs are forwarded.\n\n> Logs from RSA\u00ae SecurID Authentication Manager Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", 
""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure RSA\u00ae SecurID Authentication Manager event forwarding"", ""description"": ""Follow the configuration steps below to get RSA\u00ae SecurID Authentication Manager logs into Microsoft Sentinel.\n1. [Follow these instructions](https://community.rsa.com/t5/rsa-authentication-manager/configure-the-remote-syslog-host-for-real-time-log-monitoring/ta-p/571374) to forward alerts from the Manager to a syslog server.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSA%20SecurID/Data%20Connectors/RSASecurID.json","true" +"RSAIDPlus_AdminLogs_CL","RSAIDPlus_AdminLogs_Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSAIDPlus_AdminLogs_Connector","rsasecurity1687281258544","azure-sentinel-solution-rsa_id_plus_admin_log","2025-10-14","","","RSA Support Team","Partner","https://community.rsa.com/","","domains,verticals","RSAIDPlus_AdmingLogs_Connector","RSA","RSA ID Plus Admin Logs Connector","The RSA ID Plus AdminLogs Connector provides the capability to ingest [Cloud Admin Console Audit Events](https://community.rsa.com/s/article/Cloud-Administration-Event-Log-API-5d22ba17) into Microsoft Sentinel using Cloud Admin APIs.","[{""description"": "">**NOTE:** This connector uses Codeless Connector Framework 
(CCF) to connect to the RSA ID Plus Cloud Admin APIs to pull logs into Microsoft Sentinel.""}, {""title"": ""**STEP 1** - Create Legacy Admin API Client in Cloud Admin Console."", ""description"": ""Follow steps mentioned in this [page](https://community.rsa.com/s/article/Manage-Legacy-Clients-API-Keys-a89c9cbc#).""}, {""title"": ""**STEP 2** - Generate the Base64URL encoded JWT Token."", ""description"": ""Follow the steps mentioned in this [page](https://community.rsa.com/s/article/Authentication-for-the-Cloud-Administration-APIs-a04e3fb9) under the header 'Legacy Administration API'.""}, {""title"": ""**STEP 3** - Configure the Cloud Admin API to start ingesting Admin event logs into Microsoft Sentinel."", ""description"": ""Provide the required values below:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Admin API URL"", ""placeholder"": ""https://.access.securid.com/AdminInterface/restapi/v1/adminlog/exportLogs"", ""type"": ""text"", ""name"": ""Admin-API-URL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""JWT Token"", ""placeholder"": ""Enter your JWT Token"", ""type"": ""password"", ""name"": ""access_token""}}]}, {""title"": ""**STEP 4** - Click Connect"", ""description"": ""Verify all the fields above were filled in correctly. 
Press Connect to start the connector."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""RSA ID Plus API Authentication"", ""description"": ""To access the Admin APIs, a valid Base64URL encoded JWT token, signed with the client's Legacy Administration API key is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSAIDPlus_AdminLogs_Connector/Data%20Connectors/RSIDPlus_AdminLogs_Connector_CCP/RSAIDPlus_AdminLogs_ConnectorDefinition.json","true" +"CommonSecurityLog","Radiflow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Radiflow","radiflow","azure-sentinel-solution-radiflow","2024-06-26","","","Radiflow","Partner","https://www.radiflow.com","","domains","RadiflowIsid","Radiflow","Radiflow iSID via AMA","iSID enables non-disruptive monitoring of distributed ICS networks for changes in topology and behavior, using multiple security packages, each offering a unique capability pertaining to a specific type of network activity","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**RadiflowEvent**] which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. 
Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade.\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule).\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy._\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine."", ""instructions"": []}, {""title"": ""Step B. Configure iSID to send logs using CEF"", ""description"": ""Configure log forwarding using CEF:\n\n1. Navigate to the **System Notifications** section of the Configuration menu.\n\n2. Under Syslog, select **+Add**.\n\n3. In the **New Syslog Server** dialog specify the name, remote server **IP**, **Port**, **Transport** and select **Format** - **CEF**.\n\n4. Press **Apply** to exit the **Add Syslog dialog**."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python --version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Radiflow/Data%20Connectors/RadiflowIsid.json","true" +"NexposeInsightVMCloud_assets_CL","Rapid7InsightVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM","azuresentinel","azure-sentinel-solution-rapid7insightvm","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","InsightVMCloudAPI","Rapid7","Rapid7 Insight Platform Vulnerability Management Reports","The [Rapid7 Insight VM](https://www.rapid7.com/products/insightvm/) Report data connector provides the capability to ingest Scan reports and vulnerability data into Microsoft Sentinel through the REST API from the Rapid7 Insight platform (Managed in the cloud). Refer to [API documentation](https://docs.rapid7.com/insight/api-overview/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Insight VM API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parsers based on a Kusto Function to work as expected [**InsightVMAssets**](https://aka.ms/sentinel-InsightVMAssets-parser) and [**InsightVMVulnerabilities**](https://aka.ms/sentinel-InsightVMVulnerabilities-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Insight VM Cloud**\n\n [Follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rapid7 Insight Vulnerability Management Report data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-InsightVMCloudAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. 
Enter the **InsightVMAPIKey**, choose **InsightVMCloudRegion** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rapid7 Insight Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://github.com/averbn/azure_sentinel_data_connectors/raw/main/insight-vm-cloud-azure-sentinel-data-connector/InsightVMCloudAPISentinelConn.zip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tInsightVMAPIKey\n\t\tInsightVMCloudRegion\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials"", ""description"": ""**InsightVMAPIKey** is required for REST API. [See the documentation to learn more about API](https://docs.rapid7.com/insight/api-overview/). 
Check all [requirements and follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) for obtaining credentials""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM/Data%20Connectors/InsightVMCloud_API_FunctionApp.json","true" +"NexposeInsightVMCloud_vulnerabilities_CL","Rapid7InsightVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM","azuresentinel","azure-sentinel-solution-rapid7insightvm","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","InsightVMCloudAPI","Rapid7","Rapid7 Insight Platform Vulnerability Management Reports","The [Rapid7 Insight VM](https://www.rapid7.com/products/insightvm/) Report data connector provides the capability to ingest Scan reports and vulnerability data into Microsoft Sentinel through the REST API from the Rapid7 Insight platform (Managed in the cloud). Refer to [API documentation](https://docs.rapid7.com/insight/api-overview/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Insight VM API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parsers based on a Kusto Function to work as expected [**InsightVMAssets**](https://aka.ms/sentinel-InsightVMAssets-parser) and [**InsightVMVulnerabilities**](https://aka.ms/sentinel-InsightVMVulnerabilities-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Insight VM Cloud**\n\n [Follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rapid7 Insight Vulnerability Management Report data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-InsightVMCloudAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. 
Enter the **InsightVMAPIKey**, choose **InsightVMCloudRegion** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rapid7 Insight Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://github.com/averbn/azure_sentinel_data_connectors/raw/main/insight-vm-cloud-azure-sentinel-data-connector/InsightVMCloudAPISentinelConn.zip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tInsightVMAPIKey\n\t\tInsightVMCloudRegion\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials"", ""description"": ""**InsightVMAPIKey** is required for REST API. [See the documentation to learn more about API](https://docs.rapid7.com/insight/api-overview/). 
Check all [requirements and follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) for obtaining credentials""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM/Data%20Connectors/InsightVMCloud_API_FunctionApp.json","true" +"","Recorded Future","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Recorded%20Future","recordedfuture1605638642586","recorded_future_sentinel_solution","2021-11-01","2023-09-19","","Recorded Future Support Team","Partner","http://support.recordedfuture.com/","","domains","","","","","","","","false" +"","Recorded Future Identity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Recorded%20Future%20Identity","recordedfuture1605638642586","recorded_future_identity_solution","2022-09-06","2025-04-02","","Recorded Future Support Team","Partner","https://support.recordedfuture.com/","","domains","","","","","","","","false" +"RedCanaryDetections_CL","Red Canary","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Red%20Canary","Red Canary","microsoft-sentinel-solution-RedCanary","2022-03-04","2022-03-04","","Red Canary","Partner","https://www.redcanary.com","","domains","RedCanaryDataConnector","Red Canary","Red Canary Threat Detection","The Red Canary data connector provides the capability to ingest published Detections into Microsoft Sentinel using the Data Collector REST API.","[{""title"": """", ""description"": ""Create an Automate Playbook and Trigger as detailed in [this article](https://help.redcanary.com/hc/en-us/articles/4410957523479-Azure-Sentinel). 
You can skip the **Add analysis rule to Microsoft Sentinel** section; this data connector allows you to import the analysis rule directly into your workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Red%20Canary/Data%20Connectors/RedCanaryDataConnector.json","true" +"","ReversingLabs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ReversingLabs","reversinglabs1597673283347","rl_offer_content_hub_aoae","2022-08-08","2024-07-17","","ReversingLabs","Partner","https://support.reversinglabs.com/hc/en-us","","domains","","","","","","","","false" +"CommonSecurityLog","RidgeSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RidgeSecurity","ridgesecuritytechnologyinc1670890478389","microsoft-sentinel-solution-ridgesecurity","2023-10-23","2023-10-23","","RidgeSecurity","Partner","https://ridgesecurity.ai/about-us/","","domains","RidgeBotDataConnector","RidgeSecurity","[Deprecated] RIDGEBOT - data connector for Microsoft Sentinel","The RidgeBot 
connector lets users connect RidgeBot with Microsoft Sentinel, allowing creation of Dashboards, Workbooks, Notebooks and Alerts.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure the RidgeBot to forward events to syslog server as described here: https://portal.ridgesecurity.ai/downloadurl/89x72912. Generate some attack events for your application.""}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RidgeSecurity/Data%20Connectors/RidgeSecurity.json","true" +"","RiskIQ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RiskIQ","azuresentinel","azure-sentinel-solution-riskiq","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Rubrik_Anomaly_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Rubrik webhook which push its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Rubrik Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rubrik connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-RubrikWebhookEvents-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tAnomaliesTableName \n\t\tRansomwareAnalysisTableName \n\t\tThreatHuntsTableName \n\t\tEventsTableName \n\t\tLogLevel \n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rubrik Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-RubrikWebhookEvents-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. RubrikXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tAnomaliesTableName\n\t\tRansomwareAnalysisTableName\n\t\tThreatHuntsTableName\n\t\tEventsTableName\n\t\tLogLevel\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**Post Deployment steps**\n\n""}, {""title"": ""1) Get the Function app endpoint"", ""description"": ""1. Go to Azure function Overview page and Click on **\""Functions\""** tab.\n2. Click on the function called **\""RubrikHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url.""}, {""title"": ""2) Add a webhook in RubrikSecurityCloud to send data to Microsoft Sentinel."", ""description"": ""Follow the Rubrik User Guide instructions to [Add a Webhook](https://docs.rubrik.com/en-us/saas/saas/common/adding_webhook.html) to begin receiving event information \n 1. Select the Microsoft Sentinel as the webhook Provider \n 2. Enter the desired Webhook name \n 3. Enter the URL part from copied Function-url as the webhook URL endpoint and replace **{functionname}** with **\""RubrikAnomalyOrchestrator\""**, for the Rubrik Microsoft Sentinel Solution \n 4. Select the EventType as Anomaly \n 5. 
Select the following severity levels: Critical, Warning, Informational \n 6. Choose multiple log types, if desired, when running **\""RubrikEventsOrchestrator\""** \n 7. Repeat the same steps to add webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events.\n \n\n NOTE: while adding webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events, replace **{functionname}** with **\""RubrikRansomwareOrchestrator\""**, **\""RubrikThreatHuntOrchestrator\""** and **\""RubrikEventsOrchestrator\""** respectively in copied function-url.""}, {""title"": """", ""description"": ""*Now we are done with the rubrik Webhook configuration. Once the webhook events triggered , you should be able to see the Anomaly, Anomaly Detection Analysis, Threat Hunt events and Other Events from the Rubrik into respective LogAnalytics workspace table called \""Rubrik_Anomaly_Data_CL\"", \""Rubrik_Ransomware_Data_CL\"", \""Rubrik_ThreatHunt_Data_CL\"", and \""Rubrik_Events_Data_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" +"Rubrik_Events_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Rubrik webhook which push its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Rubrik Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rubrik connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-RubrikWebhookEvents-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tAnomaliesTableName \n\t\tRansomwareAnalysisTableName \n\t\tThreatHuntsTableName \n\t\tEventsTableName \n\t\tLogLevel \n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rubrik Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-RubrikWebhookEvents-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. RubrikXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tAnomaliesTableName\n\t\tRansomwareAnalysisTableName\n\t\tThreatHuntsTableName\n\t\tEventsTableName\n\t\tLogLevel\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**Post Deployment steps**\n\n""}, {""title"": ""1) Get the Function app endpoint"", ""description"": ""1. Go to Azure function Overview page and Click on **\""Functions\""** tab.\n2. Click on the function called **\""RubrikHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url.""}, {""title"": ""2) Add a webhook in RubrikSecurityCloud to send data to Microsoft Sentinel."", ""description"": ""Follow the Rubrik User Guide instructions to [Add a Webhook](https://docs.rubrik.com/en-us/saas/saas/common/adding_webhook.html) to begin receiving event information \n 1. Select the Microsoft Sentinel as the webhook Provider \n 2. Enter the desired Webhook name \n 3. Enter the URL part from copied Function-url as the webhook URL endpoint and replace **{functionname}** with **\""RubrikAnomalyOrchestrator\""**, for the Rubrik Microsoft Sentinel Solution \n 4. Select the EventType as Anomaly \n 5. Select the following severity levels: Critical, Warning, Informational \n 6. Choose multiple log types, if desired, when running **\""RubrikEventsOrchestrator\""** \n 7. 
Repeat the same steps to add webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events.\n \n\n NOTE: while adding webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events, replace **{functionname}** with **\""RubrikRansomwareOrchestrator\""**, **\""RubrikThreatHuntOrchestrator\""** and **\""RubrikEventsOrchestrator\""** respectively in copied function-url.""}, {""title"": """", ""description"": ""*Now we are done with the rubrik Webhook configuration. Once the webhook events triggered , you should be able to see the Anomaly, Anomaly Detection Analysis, Threat Hunt events and Other Events from the Rubrik into respective LogAnalytics workspace table called \""Rubrik_Anomaly_Data_CL\"", \""Rubrik_Ransomware_Data_CL\"", \""Rubrik_ThreatHunt_Data_CL\"", and \""Rubrik_Events_Data_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" +"Rubrik_Ransomware_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Rubrik webhook which push its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Rubrik Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rubrik connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-RubrikWebhookEvents-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tAnomaliesTableName \n\t\tRansomwareAnalysisTableName \n\t\tThreatHuntsTableName \n\t\tEventsTableName \n\t\tLogLevel \n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rubrik Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-RubrikWebhookEvents-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. RubrikXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tAnomaliesTableName\n\t\tRansomwareAnalysisTableName\n\t\tThreatHuntsTableName\n\t\tEventsTableName\n\t\tLogLevel\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**Post Deployment steps**\n\n""}, {""title"": ""1) Get the Function app endpoint"", ""description"": ""1. Go to Azure function Overview page and Click on **\""Functions\""** tab.\n2. Click on the function called **\""RubrikHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url.""}, {""title"": ""2) Add a webhook in RubrikSecurityCloud to send data to Microsoft Sentinel."", ""description"": ""Follow the Rubrik User Guide instructions to [Add a Webhook](https://docs.rubrik.com/en-us/saas/saas/common/adding_webhook.html) to begin receiving event information \n 1. Select the Microsoft Sentinel as the webhook Provider \n 2. Enter the desired Webhook name \n 3. Enter the URL part from copied Function-url as the webhook URL endpoint and replace **{functionname}** with **\""RubrikAnomalyOrchestrator\""**, for the Rubrik Microsoft Sentinel Solution \n 4. Select the EventType as Anomaly \n 5. Select the following severity levels: Critical, Warning, Informational \n 6. Choose multiple log types, if desired, when running **\""RubrikEventsOrchestrator\""** \n 7. 
Repeat the same steps to add webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events.\n \n\n NOTE: while adding webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events, replace **{functionname}** with **\""RubrikRansomwareOrchestrator\""**, **\""RubrikThreatHuntOrchestrator\""** and **\""RubrikEventsOrchestrator\""** respectively in copied function-url.""}, {""title"": """", ""description"": ""*Now we are done with the rubrik Webhook configuration. Once the webhook events triggered , you should be able to see the Anomaly, Anomaly Detection Analysis, Threat Hunt events and Other Events from the Rubrik into respective LogAnalytics workspace table called \""Rubrik_Anomaly_Data_CL\"", \""Rubrik_Ransomware_Data_CL\"", \""Rubrik_ThreatHunt_Data_CL\"", and \""Rubrik_Events_Data_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" +"Rubrik_ThreatHunt_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Rubrik webhook which push its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Rubrik Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rubrik connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-RubrikWebhookEvents-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tAnomaliesTableName \n\t\tRansomwareAnalysisTableName \n\t\tThreatHuntsTableName \n\t\tEventsTableName \n\t\tLogLevel \n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rubrik Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-RubrikWebhookEvents-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. RubrikXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tAnomaliesTableName\n\t\tRansomwareAnalysisTableName\n\t\tThreatHuntsTableName\n\t\tEventsTableName\n\t\tLogLevel\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**Post Deployment steps**\n\n""}, {""title"": ""1) Get the Function app endpoint"", ""description"": ""1. Go to Azure function Overview page and Click on **\""Functions\""** tab.\n2. Click on the function called **\""RubrikHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url.""}, {""title"": ""2) Add a webhook in RubrikSecurityCloud to send data to Microsoft Sentinel."", ""description"": ""Follow the Rubrik User Guide instructions to [Add a Webhook](https://docs.rubrik.com/en-us/saas/saas/common/adding_webhook.html) to begin receiving event information \n 1. Select the Microsoft Sentinel as the webhook Provider \n 2. Enter the desired Webhook name \n 3. Enter the URL part from copied Function-url as the webhook URL endpoint and replace **{functionname}** with **\""RubrikAnomalyOrchestrator\""**, for the Rubrik Microsoft Sentinel Solution \n 4. Select the EventType as Anomaly \n 5. Select the following severity levels: Critical, Warning, Informational \n 6. Choose multiple log types, if desired, when running **\""RubrikEventsOrchestrator\""** \n 7. 
Repeat the same steps to add webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events.\n \n\n NOTE: while adding webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events, replace **{functionname}** with **\""RubrikRansomwareOrchestrator\""**, **\""RubrikThreatHuntOrchestrator\""** and **\""RubrikEventsOrchestrator\""** respectively in copied function-url.""}, {""title"": """", ""description"": ""*Now we are done with the rubrik Webhook configuration. Once the webhook events triggered , you should be able to see the Anomaly, Anomaly Detection Analysis, Threat Hunt events and Other Events from the Rubrik into respective LogAnalytics workspace table called \""Rubrik_Anomaly_Data_CL\"", \""Rubrik_Ransomware_Data_CL\"", \""Rubrik_ThreatHunt_Data_CL\"", and \""Rubrik_Events_Data_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" +"","SAP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP","","","","","","","","","","","","","","","","","","false" +"SAPBTPAuditLog_CL","SAP BTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20BTP","sentinel4sap","sap_btp_sentinel_solution","2023-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SAPBTPAuditEvents","Microsoft","SAP BTP","SAP Business Technology Platform (SAP BTP) brings together data management, analytics, artificial intelligence, application development, automation, and integration in one, unified environment.","[{""description"": ""**Step 1 - Configuration steps for the SAP BTP Audit Retrieval API**\n\nFollow the steps provided by SAP [see Audit Log Retrieval API for Global Accounts in the Cloud Foundry Environment](https://help.sap.com/docs/btp/sap-business-technology-platform/audit-log-retrieval-api-for-global-accounts-in-cloud-foundry-environment/). Take a note of the **url** (Audit Retrieval API URL), **uaa.url** (User Account and Authentication Server url) and the associated **uaa.clientid**.\n\n>**NOTE:** You can onboard one or more BTP subaccounts by following the steps provided by SAP [see Audit Log Retrieval API Usage for Subaccounts in the Cloud Foundry Environment](https://help.sap.com/docs/btp/sap-business-technology-platform/audit-log-retrieval-api-usage-for-subaccounts-in-cloud-foundry-environment/). 
Add a connection for each subaccount.""}, {""description"": ""Connect using OAuth client credentials"", ""title"": ""Connect events from SAP BTP to Microsoft Sentinel"", ""instructions"": [{""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""label"": ""Add account"", ""isPrimary"": true, ""title"": ""BTP connection"", ""instructionSteps"": [{""title"": ""Account Details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Subaccount name (e.g. Contoso). This will be projected to the InstanceName column."", ""placeholder"": ""no space or special character allowed!"", ""type"": ""text"", ""name"": ""subaccountName""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP BTP Client ID"", ""placeholder"": ""Client ID"", ""type"": ""text"", ""name"": ""clientId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP BTP Client Secret"", ""placeholder"": ""Client Secret"", ""type"": ""password"", ""name"": ""clientSecret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Authorization server URL (UAA server)"", ""placeholder"": ""https://your-tenant.authentication.region.hana.ondemand.com"", ""type"": ""text"", ""name"": ""authServerUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Audit Retrieval API URL"", ""placeholder"": ""https://auditlog-management.cfapps.region.hana.ondemand.com"", ""type"": ""text"", ""name"": ""auditHost""}}]}]}}]}, {""title"": ""Subaccounts"", ""description"": ""Each row represents a connected subaccount"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Subaccount Name"", ""columnValue"": ""name""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", 
""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Client Id and Client Secret for Audit Retrieval API"", ""description"": ""Enable API access in BTP.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20BTP/Data%20Connectors/SAPBTPPollerConnector/SAPBTP_DataConnectorDefinition.json","true" +"SAPETDAlerts_CL","SAP ETD Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud","sap_jasondau","azure-sentinel-solution-sapetd","2025-02-17","2025-09-11","","SAP","Partner","https://help.sap.com/docs/SAP_ENTERPRISE_THREAT_DETECTION_CLOUD_EDITION","","domains","SAPETDAlerts","SAP","SAP Enterprise Threat Detection, cloud edition","The SAP Enterprise Threat Detection, cloud edition (ETD) data connector enables ingestion of security alerts from ETD into Microsoft Sentinel, supporting cross-correlation, alerting, and threat hunting.","[{""description"": ""**Step 1 - Configuration steps for the SAP ETD Audit Retrieval API**\n\nFollow the steps provided by SAP [see ETD docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-for-global-accounts-in-cloud-foundry-environment/). 
Take a note of the **url** (Audit Retrieval API URL), **uaa.url** (User Account and Authentication Server url) and the associated **uaa.clientid**.\n\n>**NOTE:** You can onboard one or more ETD subaccounts by following the steps provided by SAP [see ETD docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-usage-for-subaccounts-in-cloud-foundry-environment/). Add a connection for each subaccount.\n\n>**TIP:** Use the [shared blog series](https://community.sap.com/t5/enterprise-resource-planning-blog-posts-by-sap/sap-enterprise-threat-detection-cloud-edition-joins-forces-with-microsoft/ba-p/13942075) for additional info.""}, {""description"": ""Connect using OAuth client credentials"", ""title"": ""Connect events from SAP ETD to Microsoft Sentinel"", ""instructions"": [{""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""label"": ""Add account"", ""isPrimary"": true, ""title"": ""ETD connection"", ""instructionSteps"": [{""title"": ""Account Details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD Client ID"", ""placeholder"": ""Client ID"", ""type"": ""text"", ""name"": ""clientId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD Client Secret"", ""placeholder"": ""Client Secret"", ""type"": ""password"", ""name"": ""clientSecret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Authorization server URL (UAA server)"", ""placeholder"": ""https://your-tenant.authentication.region.hana.ondemand.com/oauth/token"", ""type"": ""text"", ""name"": ""authServerUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD data retrieval API URL"", ""placeholder"": ""https://your-etd-cloud-data-retrieval-service.cfapps.region.hana.ondemand.com"", ""type"": ""text"", ""name"": ""etdHost""}}]}]}}]}, {""title"": ""ETD accounts"", ""description"": ""Each row represents a connected ETD account"", ""instructions"": [{""type"": 
""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Data retrieval endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Client Id and Client Secret for ETD Retrieval API"", ""description"": ""Enable API access in ETD.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud/Data%20Connectors/SAPETD_PUSH_CCP/SAPETD_connectorDefinition.json","true" +"SAPETDInvestigations_CL","SAP ETD Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud","sap_jasondau","azure-sentinel-solution-sapetd","2025-02-17","2025-09-11","","SAP","Partner","https://help.sap.com/docs/SAP_ENTERPRISE_THREAT_DETECTION_CLOUD_EDITION","","domains","SAPETDAlerts","SAP","SAP Enterprise Threat Detection, cloud edition","The SAP Enterprise Threat Detection, cloud edition (ETD) data connector enables ingestion of security alerts from ETD into Microsoft Sentinel, supporting cross-correlation, alerting, and threat hunting.","[{""description"": ""**Step 1 - Configuration steps for the SAP ETD Audit Retrieval API**\n\nFollow the steps provided by SAP [see ETD 
docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-for-global-accounts-in-cloud-foundry-environment/). Take a note of the **url** (Audit Retrieval API URL), **uaa.url** (User Account and Authentication Server url) and the associated **uaa.clientid**.\n\n>**NOTE:** You can onboard one or more ETD subaccounts by following the steps provided by SAP [see ETD docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-usage-for-subaccounts-in-cloud-foundry-environment/). Add a connection for each subaccount.\n\n>**TIP:** Use the [shared blog series](https://community.sap.com/t5/enterprise-resource-planning-blog-posts-by-sap/sap-enterprise-threat-detection-cloud-edition-joins-forces-with-microsoft/ba-p/13942075) for additional info.""}, {""description"": ""Connect using OAuth client credentials"", ""title"": ""Connect events from SAP ETD to Microsoft Sentinel"", ""instructions"": [{""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""label"": ""Add account"", ""isPrimary"": true, ""title"": ""ETD connection"", ""instructionSteps"": [{""title"": ""Account Details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD Client ID"", ""placeholder"": ""Client ID"", ""type"": ""text"", ""name"": ""clientId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD Client Secret"", ""placeholder"": ""Client Secret"", ""type"": ""password"", ""name"": ""clientSecret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Authorization server URL (UAA server)"", ""placeholder"": ""https://your-tenant.authentication.region.hana.ondemand.com/oauth/token"", ""type"": ""text"", ""name"": ""authServerUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD data retrieval API URL"", ""placeholder"": ""https://your-etd-cloud-data-retrieval-service.cfapps.region.hana.ondemand.com"", ""type"": ""text"", ""name"": 
""etdHost""}}]}]}}]}, {""title"": ""ETD accounts"", ""description"": ""Each row represents a connected ETD account"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Data retrieval endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Client Id and Client Secret for ETD Retrieval API"", ""description"": ""Enable API access in ETD.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud/Data%20Connectors/SAPETD_PUSH_CCP/SAPETD_connectorDefinition.json","true" +"SAPLogServ_CL","SAP LogServ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20LogServ","sap_jasondau","azure-sentinel-solution-saplogserv","2025-02-17","2025-07-18","","SAP","Partner","https://community.sap.com/t5/enterprise-resource-planning-blogs-by-sap/announcing-limited-preview-of-sap-logserv-integration-with-microsoft/ba-p/13942180","","domains","SAPLogServ","SAP SE","SAP LogServ (RISE), S/4HANA Cloud private edition","SAP LogServ is an SAP Enterprise Cloud Services (ECS) service aimed at collection, storage, forwarding and access of logs. 
LogServ centralizes the logs from all systems, applications, and ECS services used by a registered customer.
Main Features include:
Near Realtime Log Collection: With ability to integrate into Microsoft Sentinel as SIEM solution.
LogServ complements the existing SAP application layer threat monitoring and detections in Microsoft Sentinel with the log types owned by SAP ECS as the system provider. This includes logs like: SAP Security Audit Log (AS ABAP), HANA database, AS JAVA, ICM, SAP Web Dispatcher, SAP Cloud Connector, OS, SAP Gateway, 3rd party Database, Network, DNS, Proxy, Firewall","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""SAP LogServ push to Microsoft Sentinel""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. 
Maintain the data collection endpoint details and authentication info in SAP LogServ"", ""description"": ""Share the data collection endpoint URL and authentication info with the SAP LogServ administrator to configure the SAP LogServ to send data to the data collection endpoint.\n\nLearn more from [this blog series](https://community.sap.com/t5/enterprise-resource-planning-blog-posts-by-members/ultimate-blog-series-sap-logserv-integration-with-microsoft-sentinel/ba-p/14126401)."", ""instructions"": [{""parameters"": {""label"": ""Use this value to configure as Tenant ID in the LogIngestionAPI credential."", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Use this value to configure the LogsIngestionURL parameter when deploying the IFlow."", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20LogServ/Data%20Connectors/SAPLogServ.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20LogServ/Data%20Connectors/SAPLogServ_PUSH_CCP/SAPLogServ_connectorDefinition.json","false" +"ABAPAuditLog","SAP S4 Cloud Public Edition","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20S4%20Cloud%20Public%20Edition","sap_jasondau","azure-sentinel-solution-s4hana-public","2025-09-12","","","SAP","Partner","https://api.sap.com/api/SecurityAuditLog_ODataService/overview","","domains","SAPS4PublicAlerts","SAP","SAP S/4HANA Cloud Public Edition","The SAP S/4HANA Cloud Public Edition (GROW with SAP) data connector enables ingestion of SAP's security audit log into the Microsoft Sentinel Solution for SAP, supporting cross-correlation, alerting, and threat hunting. Looking for alternative authentication mechanisms? See [here](https://github.com/Azure-Samples/Sentinel-For-SAP-Community/tree/main/integration-artifacts).","[{""description"": ""**Step 1 - Configuration steps for SAP S/4HANA Cloud Public Edition**\n\nTo connect to SAP S/4HANA Cloud Public Edition, you will need:\n\n1. 
Configure a communication arrangement for communication scenario **[SAP_COM_0750](https://help.sap.com/docs/SAP_S4HANA_CLOUD/0f69f8fb28ac4bf48d2b57b9637e81fa/a93dca70e2ce43d19ac93e3e5531e37d.html)** \n\n2. SAP S/4HANA Cloud Public Edition tenant **API URL**\n3. Valid **communication user (username and password)** for your SAP S/4HANA Cloud system\n4. **Appropriate authorizations** to access audit log data via OData services\n\n>**NOTE:** This connector supports Basic authentication. Looking for alternative authentication mechanisms? See [here](https://github.com/Azure-Samples/Sentinel-For-SAP-Community/tree/main/integration-artifacts)""}, {""description"": ""Connect using Basic authentication"", ""title"": ""Connect events from SAP S/4HANA Cloud Public Edition to Microsoft Sentinel Solution for SAP"", ""instructions"": [{""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""label"": ""Add account"", ""isPrimary"": true, ""title"": ""S/4HANA Cloud Public Edition connection"", ""instructionSteps"": [{""title"": ""Account Details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Username"", ""placeholder"": ""Enter your SAP S/4HANA Cloud username"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Password"", ""placeholder"": ""Enter your SAP S/4HANA Cloud password"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP S/4HANA Cloud API URL"", ""placeholder"": ""https://my123456-api.s4hana.cloud.sap"", ""type"": ""text"", ""name"": ""s4hanaHost""}}]}]}}]}, {""title"": ""S/4HANA Cloud Public Edition connections"", ""description"": ""Each row represents a connected S/4HANA Cloud Public Edition system"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""S/4HANA Cloud API endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], 
""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Client Id and Client Secret for Audit Retrieval API"", ""description"": ""Enable API access in BTP.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20S4%20Cloud%20Public%20Edition/Data%20Connectors/SAPS4PublicPollerConnector/SAPS4Public_connectorDefinition.json","true" +"SIGNL4_CL","SIGNL4","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4","derdack","azure-sentinel-solution-signl4","2021-12-10","2021-12-10","","Derdack","Partner","https://www.signl4.com","","domains","DerdackSIGNL4","Derdack","Derdack SIGNL4","When critical systems fail or security incidents happen, SIGNL4 bridges the ‘last mile’ to your staff, engineers, IT admins and workers in the field. It adds real-time mobile alerting to your services, systems, and processes in no time. SIGNL4 notifies through persistent mobile push, SMS text and voice calls with acknowledgement, tracking and escalation. Integrated duty and shift scheduling ensure the right people are alerted at the right time.

[Learn more >](https://www.signl4.com)","[{""title"": """", ""description"": "">**NOTE:** This data connector is mainly configured on the SIGNL4 side. You can find a description video here: [**Integrate SIGNL4 with Microsoft Sentinel**](https://www.signl4.com/blog/portfolio_item/azure-sentinel-mobile-alert-notification-duty-schedule-escalation/)."", ""instructions"": []}, {""title"": """", ""description"": "">**SIGNL4 Connector:** The SIGNL4 connector for Microsoft Sentinel, Azure Security Center and other Azure Graph Security API providers provides seamless 2-way integration with your Azure Security solutions. Once added to your SIGNL4 team, the connector will read security alerts from Azure Graph Security API and fully automatically and trigger alert notifications to your team members on duty. It will also synchronize the alert status from SIGNL4 to Graph Security API, so that if alerts are acknowledged or closed, this status is also updated on the according Azure Graph Security API alert or the corresponding security provider. As mentioned, the connector mainly uses Azure Graph Security API, but for some security providers, such as Microsoft Sentinel, it also uses dedicated REST APIs from according Azure solutions."", ""instructions"": []}, {""title"": ""Microsoft Sentinel Features"", ""description"": ""Microsoft Sentinel is a cloud native SIEM solution from Microsoft and a security alert provider in Azure Graph Security API. However, the level of alert details available with the Graph Security API is limited for Microsoft Sentinel. The connector can therefore augment alerts with further details (insights rule search results), from the underlying Microsoft Sentinel Log Analytics workspace. To be able to do that, the connector communicates with Azure Log Analytics REST API and needs according permissions (see below). Furthermore, the app can also update the status of Microsoft Sentinel incidents, when all related security alerts are e.g. in progress or resolved. 
In order to be able to do that, the connector needs to be a member of the 'Microsoft Sentinel Contributors' group in your Azure Subscription.\n **Automated deployment in Azure**\n The credentials required to access the beforementioned APIs, are generated by a small PowerShell script that you can download below. The script performs the following tasks for you:\n - Logs you on to your Azure Subscription (please login with an administrator account)\n - Creates a new enterprise application for this connector in your Azure AD, also referred to as service principal\n - Creates a new role in your Azure IAM that grants read/query permission to only Azure Log Analytics workspaces.\n - Joins the enterprise application to that user role\n - Joins the enterprise application to the 'Microsoft Sentinel Contributors' role\n - Outputs some data that you need to configure app (see below)"", ""instructions"": []}, {""title"": ""Deployment procedure"", ""description"": ""1. Download the PowerShell deployment script from [here](https://github.com/signl4/signl4-integration-azuresentinel/blob/master/registerSIGNL4Client.ps1).\n2. Review the script and the roles and permission scopes it deploys for the new app registration. If you don't want to use the connector with Microsoft Sentinel, you could remove all role creation and role assignment code and only use it to create the app registration (SPN) in your Azure Active Directory.\n3. Run the script. At the end it outputs information that you need to enter in the connector app configuration.\n4. In Azure AD, click on 'App Registrations'. Find the app with the name 'SIGNL4AzureSecurity' and open its details\n5. On the left menu blade click 'API Permissions'. Then click 'Add a permission'.\n6. On the blade that loads, under 'Microsoft APIs' click on the 'Microsoft Graph' tile, then click 'App permission'.\n7. In the table that is displayed expand 'SecurityEvents' and check 'SecurityEvents.Read.All' and 'SecurityEvents.ReadWrite.All'.\n8. 
Click 'Add permissions'."", ""instructions"": []}, {""title"": ""Configuring the SIGNL4 connector app"", ""description"": ""Finally, enter the IDs, that the script has outputted in the connector configuration:\n - Azure Tenant ID\n - Azure Subscription ID\n - Client ID (of the enterprise application)\n - Client Secret (of the enterprise application)\n Once the app is enabled, it will start reading your Azure Graph Security API alerts.\n\n>**NOTE:** It will initially only read the alerts that have occurred within the last 24 hours."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4/Data%20Connectors/DerdackSIGNL4.json","true" +"SecurityIncident","SIGNL4","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4","derdack","azure-sentinel-solution-signl4","2021-12-10","2021-12-10","","Derdack","Partner","https://www.signl4.com","","domains","DerdackSIGNL4","Derdack","Derdack SIGNL4","When critical systems fail or security incidents happen, SIGNL4 bridges the ‘last mile’ to your staff, engineers, IT admins and workers in the field. 
It adds real-time mobile alerting to your services, systems, and processes in no time. SIGNL4 notifies through persistent mobile push, SMS text and voice calls with acknowledgement, tracking and escalation. Integrated duty and shift scheduling ensure the right people are alerted at the right time.

[Learn more >](https://www.signl4.com)","[{""title"": """", ""description"": "">**NOTE:** This data connector is mainly configured on the SIGNL4 side. You can find a description video here: [**Integrate SIGNL4 with Microsoft Sentinel**](https://www.signl4.com/blog/portfolio_item/azure-sentinel-mobile-alert-notification-duty-schedule-escalation/)."", ""instructions"": []}, {""title"": """", ""description"": "">**SIGNL4 Connector:** The SIGNL4 connector for Microsoft Sentinel, Azure Security Center and other Azure Graph Security API providers provides seamless 2-way integration with your Azure Security solutions. Once added to your SIGNL4 team, the connector will read security alerts from Azure Graph Security API and fully automatically and trigger alert notifications to your team members on duty. It will also synchronize the alert status from SIGNL4 to Graph Security API, so that if alerts are acknowledged or closed, this status is also updated on the according Azure Graph Security API alert or the corresponding security provider. As mentioned, the connector mainly uses Azure Graph Security API, but for some security providers, such as Microsoft Sentinel, it also uses dedicated REST APIs from according Azure solutions."", ""instructions"": []}, {""title"": ""Microsoft Sentinel Features"", ""description"": ""Microsoft Sentinel is a cloud native SIEM solution from Microsoft and a security alert provider in Azure Graph Security API. However, the level of alert details available with the Graph Security API is limited for Microsoft Sentinel. The connector can therefore augment alerts with further details (insights rule search results), from the underlying Microsoft Sentinel Log Analytics workspace. To be able to do that, the connector communicates with Azure Log Analytics REST API and needs according permissions (see below). Furthermore, the app can also update the status of Microsoft Sentinel incidents, when all related security alerts are e.g. in progress or resolved. 
In order to be able to do that, the connector needs to be a member of the 'Microsoft Sentinel Contributors' group in your Azure Subscription.\n **Automated deployment in Azure**\n The credentials required to access the beforementioned APIs, are generated by a small PowerShell script that you can download below. The script performs the following tasks for you:\n - Logs you on to your Azure Subscription (please login with an administrator account)\n - Creates a new enterprise application for this connector in your Azure AD, also referred to as service principal\n - Creates a new role in your Azure IAM that grants read/query permission to only Azure Log Analytics workspaces.\n - Joins the enterprise application to that user role\n - Joins the enterprise application to the 'Microsoft Sentinel Contributors' role\n - Outputs some data that you need to configure app (see below)"", ""instructions"": []}, {""title"": ""Deployment procedure"", ""description"": ""1. Download the PowerShell deployment script from [here](https://github.com/signl4/signl4-integration-azuresentinel/blob/master/registerSIGNL4Client.ps1).\n2. Review the script and the roles and permission scopes it deploys for the new app registration. If you don't want to use the connector with Microsoft Sentinel, you could remove all role creation and role assignment code and only use it to create the app registration (SPN) in your Azure Active Directory.\n3. Run the script. At the end it outputs information that you need to enter in the connector app configuration.\n4. In Azure AD, click on 'App Registrations'. Find the app with the name 'SIGNL4AzureSecurity' and open its details\n5. On the left menu blade click 'API Permissions'. Then click 'Add a permission'.\n6. On the blade that loads, under 'Microsoft APIs' click on the 'Microsoft Graph' tile, then click 'App permission'.\n7. In the table that is displayed expand 'SecurityEvents' and check 'SecurityEvents.Read.All' and 'SecurityEvents.ReadWrite.All'.\n8. 
Click 'Add permissions'."", ""instructions"": []}, {""title"": ""Configuring the SIGNL4 connector app"", ""description"": ""Finally, enter the IDs, that the script has outputted in the connector configuration:\n - Azure Tenant ID\n - Azure Subscription ID\n - Client ID (of the enterprise application)\n - Client Secret (of the enterprise application)\n Once the app is enabled, it will start reading your Azure Graph Security API alerts.\n\n>**NOTE:** It will initially only read the alerts that have occurred within the last 24 hours."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4/Data%20Connectors/DerdackSIGNL4.json","true" +"SINECSecurityGuard_CL","SINEC Security Guard","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SINEC%20Security%20Guard","siemensplmsoftware","azure-sentinel-solution-ssg","2024-07-15","","","Siemens AG","Partner","https://siemens.com/sinec-security-guard","","domains,verticals","SSG","Siemens AG","SINEC Security Guard","The SINEC Security Guard solution for Microsoft Sentinel allows you to ingest security events of your industrial networks from the [SINEC Security Guard](https://siemens.com/sinec-security-guard) into Microsoft Sentinel","[{""description"": ""This Data Connector relies on the SINEC Security Guard Sensor Package to be able to receive Sensor events in Microsoft Sentinel. The Sensor Package can be purchased in the Siemens Xcelerator Marketplace."", ""instructions"": [{""parameters"": {""title"": ""1. 
Please follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Set up the SINEC Security Guard Sensor"", ""description"": ""Detailed step for setting up the sensor.""}, {""title"": ""Create the Data Connector and configure it in the SINEC Security Guard web interface"", ""description"": ""Instructions on configuring the data connector.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SINEC%20Security%20Guard/Data%20Connectors/data_connector_GenericUI.json","true" +"","SOC Handbook","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC%20Handbook","microsoftsentinelcommunity","azure-sentinel-solution-sochandbook","2022-11-30","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","","","false" +"SOCPrimeAuditLogs_CL","SOC Prime CCF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC%20Prime%20CCF","socprime","azure-sentinel-solution-socprimeauditccp","2025-09-25","","","SOC Prime","Partner","https://socprime.com/","","domains","SOCPrimeAuditLogsDataConnector","Microsoft","SOC Prime Platform Audit Logs Data Connector","The [SOC Prime Audit 
Logs](https://help.socprime.com/en/articles/6265791-api) data connector allows ingesting logs from the SOC Prime Platform API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SOC Prime Platform API to fetch SOC Prime platform audit logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SOC Prime Platform API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://help.socprime.com/en/articles/6265791-api#h_8a0d20b204) to generate personal API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Retrieve API Key\n 1. Log in to the SOC Prime Platform\n 2. Click [**Account**] icon -> [**Platform Settings**] -> [**API**] \n 3. Click [**Add New Key**] \n 4. In the modal that appears give your key a meaningful name, set expiration date and product APIs the key provides access to \n 5. Click on [**Generate**] \n 6. Copy the key and save it in a safe place. 
You won't be able to view it again once you close this modal ""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SOC Prime API Key"", ""placeholder"": ""API Key"", ""type"": ""password"", ""name"": ""apitoken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC%20Prime%20CCF/Data%20Connectors/SOCPrime_ccp/SOCPrime_DataConnectorDefinition.json","true" +"","SOC-Process-Framework","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC-Process-Framework","azuresentinel","azure-sentinel-solution-socprocessframework","2022-04-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"SailPointIDN_Events_CL","SailPointIdentityNow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow","sailpoint1582673310610","sentinel_offering","2021-10-26","","","SailPoint","Partner","","","domains","SailPointIdentityNow","SailPoint","SailPoint IdentityNow","The [SailPoint](https://www.sailpoint.com/) IdentityNow data connector provides the capability to ingest [SailPoint IdentityNow] search events into Microsoft Sentinel through the REST API. The connector provides customers the ability to extract audit information from their IdentityNow tenant. 
It is intended to make it even easier to bring IdentityNow user activity and governance events into Microsoft Sentinel to improve insights from your security incident and event monitoring solution.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SailPoint IdentityNow REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SailPoint IdentityNow API**\n\n [Follow the instructions](https://community.sailpoint.com/t5/IdentityNow-Articles/Best-Practice-Using-Personal-Access-Tokens-in-IdentityNow/ta-p/150471) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SailPoint IdentityNow data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SailPoint IdentityNow data connector using an ARM Template.\n\n1. 
Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-sailpointidentitynow-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter other information and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SailPoint IdentityNow data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-sailpointidentitynow-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. 
Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. searcheventXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.9.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTENANT_ID\n\t\tSHARED_KEY\n\t\tLIMIT\n\t\tGRANT_TYPE\n\t\tCUSTOMER_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tAZURE_STORAGE_ACCESS_KEY\n\t\tAZURE_STORAGE_ACCOUNT_NAME\n\t\tAzureWebJobsStorage\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SailPoint IdentityNow API Authentication Credentials"", ""description"": ""TENANT_ID, CLIENT_ID and CLIENT_SECRET are required for authentication.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow/Data%20Connectors/SailPoint_IdentityNow_FunctionApp.json","true" +"SailPointIDN_Triggers_CL","SailPointIdentityNow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow","sailpoint1582673310610","sentinel_offering","2021-10-26","","","SailPoint","Partner","","","domains","SailPointIdentityNow","SailPoint","SailPoint IdentityNow","The [SailPoint](https://www.sailpoint.com/) IdentityNow data connector provides the capability to ingest [SailPoint IdentityNow] search events into Microsoft Sentinel through the REST API. 
The connector provides customers the ability to extract audit information from their IdentityNow tenant. It is intended to make it even easier to bring IdentityNow user activity and governance events into Microsoft Sentinel to improve insights from your security incident and event monitoring solution.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SailPoint IdentityNow REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SailPoint IdentityNow API**\n\n [Follow the instructions](https://community.sailpoint.com/t5/IdentityNow-Articles/Best-Practice-Using-Personal-Access-Tokens-in-IdentityNow/ta-p/150471) to obtain the credentials. 
\n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SailPoint IdentityNow data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SailPoint IdentityNow data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-sailpointidentitynow-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter other information and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SailPoint IdentityNow data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-sailpointidentitynow-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. searcheventXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.9.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTENANT_ID\n\t\tSHARED_KEY\n\t\tLIMIT\n\t\tGRANT_TYPE\n\t\tCUSTOMER_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tAZURE_STORAGE_ACCESS_KEY\n\t\tAZURE_STORAGE_ACCOUNT_NAME\n\t\tAzureWebJobsStorage\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SailPoint IdentityNow API Authentication Credentials"", ""description"": ""TENANT_ID, CLIENT_ID and CLIENT_SECRET are required for authentication.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow/Data%20Connectors/SailPoint_IdentityNow_FunctionApp.json","true" +"","SalemCyber","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SalemCyber","saleminc1627928803559","salem-cyber-ai-analyst","2023-07-21","2023-07-21","","Salem Cyber","Partner","https://www.salemcyber.com/contact","","domains","","","","","","","","false" +"SalesforceServiceCloudV2_CL","Salesforce Service Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud","azuresentinel","azure-sentinel-solution-salesforceservicecloud","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SalesforceServiceCloud","Salesforce","[DEPRECATED] Salesforce Service Cloud","The Salesforce Service Cloud data connector provides the capability to ingest information about your Salesforce operational events into Microsoft Sentinel through the REST API. The connector provides ability to review events in your org on an accelerated basis, get [event log files](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/event_log_file_hourly_overview.htm) in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Salesforce Lightning Platform REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SalesforceServiceCloud and load the function code or click [here](https://aka.ms/sentinel-SalesforceServiceCloud-parser). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Salesforce Lightning Platform REST API**\n\n1. See the [link](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/quickstart.htm) and follow the instructions for obtaining Salesforce API Authorization credentials. \n2. On the **Set Up Authorization** step choose **Session ID Authorization** method.\n3. You must provide your client id, client secret, username, and password with user security token.""}, {""title"": """", ""description"": "">**NOTE:** Ingesting data from on an hourly interval may require additional licensing based on the edition of the Salesforce Service Cloud being used. 
Please refer to [Salesforce documentation](https://www.salesforce.com/editions-pricing/service-cloud/) and/or support for more details.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Salesforce Service Cloud data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Salesforce API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Salesforce Service Cloud data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SalesforceServiceCloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Salesforce API Username**, **Salesforce API Password**, **Salesforce Security Token**, **Salesforce Consumer Key**, **Salesforce Consumer Secret** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Salesforce Service Cloud data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SalesforceServiceCloud-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSalesforceUser\n\t\tSalesforcePass\n\t\tSalesforceSecurityToken\n\t\tSalesforceConsumerKey\n\t\tSalesforceConsumerSecret\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Salesforce API Username**, **Salesforce API Password**, **Salesforce Security Token**, **Salesforce Consumer Key**, **Salesforce Consumer Secret** is required for REST API. 
[See the documentation to learn more about API](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/quickstart.htm).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud/Data%20Connectors/SalesforceServiceCloud_API_FunctionApp.json","true" +"SalesforceServiceCloud_CL","Salesforce Service Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud","azuresentinel","azure-sentinel-solution-salesforceservicecloud","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SalesforceServiceCloud","Salesforce","[DEPRECATED] Salesforce Service Cloud","The Salesforce Service Cloud data connector provides the capability to ingest information about your Salesforce operational events into Microsoft Sentinel through the REST API. The connector provides ability to review events in your org on an accelerated basis, get [event log files](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/event_log_file_hourly_overview.htm) in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Salesforce Lightning Platform REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SalesforceServiceCloud and load the function code or click [here](https://aka.ms/sentinel-SalesforceServiceCloud-parser). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Salesforce Lightning Platform REST API**\n\n1. See the [link](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/quickstart.htm) and follow the instructions for obtaining Salesforce API Authorization credentials. \n2. On the **Set Up Authorization** step choose **Session ID Authorization** method.\n3. You must provide your client id, client secret, username, and password with user security token.""}, {""title"": """", ""description"": "">**NOTE:** Ingesting data from on an hourly interval may require additional licensing based on the edition of the Salesforce Service Cloud being used. 
Please refer to [Salesforce documentation](https://www.salesforce.com/editions-pricing/service-cloud/) and/or support for more details.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Salesforce Service Cloud data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Salesforce API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Salesforce Service Cloud data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SalesforceServiceCloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Salesforce API Username**, **Salesforce API Password**, **Salesforce Security Token**, **Salesforce Consumer Key**, **Salesforce Consumer Secret** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Salesforce Service Cloud data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SalesforceServiceCloud-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSalesforceUser\n\t\tSalesforcePass\n\t\tSalesforceSecurityToken\n\t\tSalesforceConsumerKey\n\t\tSalesforceConsumerSecret\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Salesforce API Username**, **Salesforce API Password**, **Salesforce Security Token**, **Salesforce Consumer Key**, **Salesforce Consumer Secret** is required for REST API. 
[See the documentation to learn more about API](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/quickstart.htm).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud/Data%20Connectors/SalesforceServiceCloud_API_FunctionApp.json","true" +"SalesforceServiceCloudV2_CL","Salesforce Service Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud","azuresentinel","azure-sentinel-solution-salesforceservicecloud","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SalesforceServiceCloudCCPDefinition","Microsoft","Salesforce Service Cloud (via Codeless Connector Framework)","The Salesforce Service Cloud data connector provides the capability to ingest information about your Salesforce operational events into Microsoft Sentinel through the REST API. The connector provides ability to review events in your org on an accelerated basis, get [event log files](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/event_log_file_hourly_overview.htm) in hourly increments for recent activity.","[{""title"": ""Connect to Salesforce Service Cloud API to start collecting event logs in Microsoft Sentinel"", ""description"": ""Follow [Create a Connected App in Salesforce for OAuth](https://help.salesforce.com/s/articleView?id=platform.ev_relay_create_connected_app.htm&type=5) and [Configure a Connected App for the OAuth 2.0 Client Credentials Flow](https://help.salesforce.com/s/articleView?id=xcloud.connected_app_client_credentials_setup.htm&type=5) to create a Connected App with access to the Salesforce Service Cloud API. Through those instructions, you should get the Consumer Key and Consumer Secret.\n For Salesforce Domain name, Go to Setup, type My Domain in the Quick Find box, and select My Domain to view your domain details. 
Make sure to enter the domain name without a trailing slash (e.g., https://your-domain.my.salesforce.com). Fill the form below with that information."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Salesforce Domain Name"", ""placeholder"": ""Salesforce Domain Name"", ""type"": ""text"", ""name"": ""salesforceDomainName"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Log Collection Interval"", ""name"": ""queryType"", ""options"": [{""key"": ""SELECT Id,EventType,LogDate,Interval,CreatedDate,LogFile,LogFileLength FROM EventLogFile WHERE Interval='Hourly' and CreatedDate>{_QueryWindowStartTime} and CreatedDate<{_QueryWindowEndTime}"", ""text"": ""Hourly""}, {""key"": ""SELECT Id,EventType,LogDate,CreatedDate,LogFile,LogFileLength FROM EventLogFile WHERE CreatedDate>{_QueryWindowStartTime} and CreatedDate<{_QueryWindowEndTime}"", ""text"": ""Daily""}], ""placeholder"": ""Select an interval type"", ""isMultiSelect"": false, ""required"": true}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Consumer Key"", ""clientSecretLabel"": ""Consumer Secret"", ""clientIdPlaceholder"": ""Enter Connected App Consumer Key"", ""clientSecretPlaceholder"": ""Enter Connected App Consumer Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""Salesforce Service Cloud API access"", ""description"": ""Access to the Salesforce Service Cloud API through a Connected App is 
required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud/Data%20Connectors/SalesforceSentinelConnector_CCP/SalesforceServiceCloud_DataConnectorDefinition.json","true" +"Samsung_Knox_Application_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. 
If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). \n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. 
Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Microsoft Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" +"Samsung_Knox_Audit_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. 
For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). 
\n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. 
To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Microsoft Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. 
See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" +"Samsung_Knox_Network_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. 
If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). \n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. 
Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Microsoft Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" +"Samsung_Knox_Process_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. 
For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). 
\n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. 
To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Microsoft Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. 
See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" +"Samsung_Knox_System_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. 
If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). \n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. 
Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Microsoft Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" +"Samsung_Knox_User_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. 
For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). 
\n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. 
To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Microsoft Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" +"ABAPAuditLog","SecurityBridge App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App","securitybridge1647511278080","securitybridge-sentinel-app-1","2022-02-17","","","SecurityBridge","Partner","https://securitybridge.com/contact/","","domains,verticals","SecurityBridge","SecurityBridge Group GmbH","SecurityBridge Solution for SAP","SecurityBridge enhances SAP security by integrating seamlessly with Microsoft Sentinel, enabling real-time monitoring and threat detection across SAP environments. 
This integration allows Security Operations Centers (SOCs) to consolidate SAP security events with other organizational data, providing a unified view of the threat landscape . Leveraging AI-powered analytics and Microsoft’s Security Copilot, SecurityBridge identifies sophisticated attack patterns and vulnerabilities within SAP applications, including ABAP code scanning and configuration assessments . The solution supports scalable deployments across complex SAP landscapes, whether on-premises, in the cloud, or hybrid environments . By bridging the gap between IT and SAP security teams, SecurityBridge empowers organizations to proactively detect, investigate, and respond to threats, enhancing overall security posture.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""SecurityBridge Solution for SAP""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. 
Maintain the data collection endpoint details and authentication info in SecurityBridge"", ""description"": ""Share the data collection endpoint URL and authentication info with the SecurityBridge administrator to configure the Securitybridge to send data to the data collection endpoint.\n\nLearn more from our KB Page https://abap-experts.atlassian.net/wiki/spaces/SB/pages/4099309579/REST+Push+Interface"", ""instructions"": [{""parameters"": {""label"": ""Use this value to configure as Tenant ID in the LogIngestionAPI credential."", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Use this value to configure the LogsIngestionURL parameter when deploying the IFlow."", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Sentinel for SAP Stream ID"", ""value"": ""SAP_ABAPAUDITLOG""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""SecurityBridge_CL Stream ID"", ""value"": ""Custom-SecurityBridge_CL""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": 
true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App/Data%20Connectors/SecurityBridge_PUSH_CCP/SecurityBridge_connectorDefinition.json","true" +"SecurityBridgeLogs_CL","SecurityBridge App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App","securitybridge1647511278080","securitybridge-sentinel-app-1","2022-02-17","","","SecurityBridge","Partner","https://securitybridge.com/contact/","","domains,verticals","SecurityBridgeSAP","SecurityBridge","SecurityBridge Threat Detection for SAP","SecurityBridge is the first and only holistic, natively integrated security platform, addressing all aspects needed to protect organizations running SAP from internal and external threats against their core business applications. 
The SecurityBridge platform is an SAP-certified add-on, used by organizations around the globe, and addresses the clients’ need for advanced cybersecurity, real-time monitoring, compliance, code security, and patching to protect against internal and external threats.This Microsoft Sentinel Solution allows you to integrate SecurityBridge Threat Detection events from all your on-premise and cloud based SAP instances into your security monitoring.Use this Microsoft Sentinel Solution to receive normalized and speaking security events, pre-built dashboards and out-of-the-box templates for your SAP security monitoring.","[{""title"": """", ""description"": ""*NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SecurityBridgeLogs and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App/Parsers/SecurityBridgeLogs.txt).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using SecurityBridge Application Platform 7.4.0."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""This solution requires logs collection via an Microsoft Sentinel agent installation\n\n> The Microsoft Sentinel agent is supported on the following Operating Systems: \n1. Windows Servers \n2. SUSE Linux Enterprise Server\n3. Redhat Linux Enterprise Server\n4. Oracle Linux Enterprise Server\n5. 
If you have the SAP solution installed on HPUX / AIX then you will need to deploy a log collector on one of the Linux options listed above and forward your logs to that collector\n\n"", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""description"": ""1. Select the link above to open your workspace advanced settings \n2. Click **+Add custom**\n3. 
Click **Browse** to upload a sample of a SecurityBridge SAP log file (e.g. AED_20211129164544.cef). Then, click **Next >**\n4. Select **New Line** as the record delimiter then click **Next >**\n5. Select **Windows** or **Linux** and enter the path to SecurityBridge logs based on your configuration. Example:\n - '/usr/sap/tmp/sb_events/*.cef' \n\n>**NOTE:** You can add as many paths as you want in the configuration.\n\n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **SecurityBridgeLogs** as the custom log Name and click **Done**""}, {""title"": ""3. Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the SecurityBridgeLogs_CL Custom log table.\n\n>**NOTE:** It may take up to 30 minutes before new logs will appear in SecurityBridgeLogs_CL table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App/Data%20Connectors/Connector_SecurityBridge.json","true" +"SecurityScorecardFactor_CL","SecurityScorecard Cybersecurity Ratings","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings","SecurityScorecard","SecurityScorecard","2022-10-01","2022-10-01","","SecurityScorecard","Partner","https://support.securityscorecard.com/hc/en-us/requests/new","","domains","SecurityScorecardFactorAzureFunctions","SecurityScorecard","SecurityScorecard Factor","SecurityScorecard is the leader in cybersecurity risk ratings. The [SecurityScorecard](https://www.SecurityScorecard.com/) Factors data connector provides the ability for Sentinel to import SecurityScorecard factor ratings as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. Maintain full awareness of any company's security posture and be able to receive timely updates when factor scores change or drop. SecurityScorecard factor ratings are updated daily based on evidence collected across the web.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SecurityScorecard API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SecurityScorecard API**\n\n Follow these instructions to create/get a SecurityScorecard API token.\n 1. As an administrator in SecurityScorecard, navigate to My Settings and then Users\n 2. Click '+ Add User'\n 3. In the form, check off 'Check to create a bot user'\n 4. Provide a name for the Bot and provide it with Read Only permission\n 5. Click 'Add User'\n 6. Locate the newly created Bot user\n 7. Click 'create token' in the Bot user's row\n 8. Click 'Confirm' and note the API token that has been generated""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SecurityScorecard Factor data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the SecurityScorecard API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SecurityScorecard Factor connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SecurityScorecardFactorAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Coma separated IDs) \n\t\tSecurityScorecard Factor Table Name (Default: SecurityScorecardFactor) \n\t\tLevel Factor Change (Default: 7) \n\t\tFactor Schedule (Default: 0 15 * * * *) \n\t\tDiff Override Own Factor (Default: true) \n\t\tDiff Override Portfolio Factor (Default: true) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SecurityScorecard Factor data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SecurityScorecardFactorAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. 
Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SecurityScorecardXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Coma separated IDs) \n\t\tSecurityScorecard Factor Table Name (Default: SecurityScorecardFactor) \n\t\tLevel Factor Change (Default: 7) \n\t\tFactor Schedule (Default: 0 15 * * * *) \n\t\tDiff Override Own Factor (Default: true) \n\t\tDiff Override Portfolio Factor (Default: true) \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SecurityScorecard API Key** is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings/Data%20Connectors/SecurityScorecardFactor/SecurityScorecardFactor_API_FunctionApp.json","true" +"SecurityScorecardIssues_CL","SecurityScorecard Cybersecurity Ratings","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings","SecurityScorecard","SecurityScorecard","2022-10-01","2022-10-01","","SecurityScorecard","Partner","https://support.securityscorecard.com/hc/en-us/requests/new","","domains","SecurityScorecardIssueAzureFunctions","SecurityScorecard","SecurityScorecard Issue","SecurityScorecard is the leader in cybersecurity risk ratings. 
The [SecurityScorecard](https://www.SecurityScorecard.com/) Issues data connector provides the ability for Sentinel to import SecurityScorecard issue data as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. Maintain full awareness of any company's security posture and be able to receive timely updates when new cybersecurity issues are discovered.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SecurityScorecard API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SecurityScorecard API**\n\n Follow these instructions to create/get a SecurityScorecard API token.\n 1. As an administrator in SecurityScorecard, navigate to My Settings and then Users\n 2. Click '+ Add User'\n 3. In the form, check off 'Check to create a bot user'\n 4. Provide a name for the Bot and provide it with Read Only permission\n 5. Click 'Add User'\n 6. Locate the newly created Bot user\n 7. Click 'create token' in the Bot user's row\n 8. 
Click 'Confirm' and note the API token that has been generated""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SecurityScorecard Issue data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the SecurityScorecard API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SecurityScorecard Issue connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SecurityScorecardIssueAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Coma separated IDs) \n\t\tSecurityScorecard Issue Table Name (Default: SecurityScorecardIssue) \n\t\tLevel Issue Change (Default: 7) \n\t\tIssue Schedule (Default: 0 0,30 * * * *) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SecurityScorecard Issue data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SecurityScorecardIssueAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SecurityScorecardXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Coma separated IDs) \n\t\tSecurityScorecard Issue Table Name (Default: SecurityScorecardIssue) \n\t\tLevel Issue Change (Default: 7) \n\t\tIssue Schedule (Default: 0 0,30 * * * *) \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SecurityScorecard API Key** is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings/Data%20Connectors/SecurityScorecardIssue/SecurityScorecardIssue_API_FunctionApp.json","true" +"SecurityScorecardRatings_CL","SecurityScorecard Cybersecurity Ratings","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings","SecurityScorecard","SecurityScorecard","2022-10-01","2022-10-01","","SecurityScorecard","Partner","https://support.securityscorecard.com/hc/en-us/requests/new","","domains","SecurityScorecardRatingsAzureFunctions","SecurityScorecard","SecurityScorecard Cybersecurity Ratings","SecurityScorecard is the leader in cybersecurity risk ratings. The [SecurityScorecard](https://www.SecurityScorecard.com/) data connector provides the ability for Sentinel to import SecurityScorecard ratings as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. Maintain full awareness of any company's security posture and be able to receive timely updates when scores change or drop. SecurityScorecard ratings are updated daily based on evidence collected across the web.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SecurityScorecard API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SecurityScorecard API**\n\n Follow these instructions to create/get a SecurityScorecard API token.\n 1. As an administrator in SecurityScorecard, navigate to My Settings and then Users\n 2. Click '+ Add User'\n 3. In the form, check off 'Check to create a bot user'\n 4. Provide a name for the Bot and provide it with Read Only permission\n 5. Click 'Add User'\n 6. Locate the newly created Bot user\n 7. Click 'create token' in the Bot user's row\n 8. Click 'Confirm' and note the API token that has been generated""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SecurityScorecard Ratings data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the SecurityScorecard API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SecurityScorecard Ratings connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SecurityScorecardRatingsAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Coma separated IDs) \n\t\tSecurityScorecard Ratings Table Name (Default: SecurityScorecardRatings) \n\t\tLevel Ratings Change (Default: 7) \n\t\tRatings Schedule (Default: 0 45 * * * *) \n\t\tDiff Override Own Ratings (Default: true) \n\t\tDiff Override Portfolio Ratings (Default: true) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SecurityScorecard Ratings data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SecurityScorecardRatingsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. 
Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SecurityScorecardXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Coma separated IDs) \n\t\tSecurityScorecard Ratings Table Name (Default: SecurityScorecardRatings) \n\t\tLevel Ratings Change (Default: 7) \n\t\tRatings Schedule (Default: 0 45 * * * *) \n\t\tDiff Override Own Ratings (Default: true) \n\t\tDiff Override Portfolio Ratings (Default: true) \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SecurityScorecard API Key** is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings/Data%20Connectors/SecurityScorecardRatings/SecurityScorecardRatings_API_FunctionApp.json","true" +"","SecurityThreatEssentialSolution","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityThreatEssentialSolution","azuresentinel","azure-sentinel-solution-securitythreatessentialsol","2022-03-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"SecurityEvent","Semperis Directory Services 
Protector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Semperis%20Directory%20Services%20Protector","semperis","directory-services-protector-solution","2021-10-18","","","Semperis","Partner","https://www.semperis.com/contact-us/","","domains","SemperisDSP","SEMPERIS","Semperis Directory Services Protector","Semperis Directory Services Protector data connector allows for the export of its Windows event logs (i.e. Indicators of Exposure and Indicators of Compromise) to Microsoft Sentinel in real time.
It provides a data parser to manipulate the Windows event logs more easily. The different workbooks ease your Active Directory security monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**dsp_parser**](https://aka.ms/sentinel-SemperisDSP-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""**Configure Windows Security Events via AMA connector**"", ""description"": ""Collect Windows security events logs from your **Semperis DSP Management Server** .""}, {""title"": ""1. Install the Azure Monitor Agent (AMA)"", ""description"": ""On your **Semperis DSP Management Server** install the AMA on the DSP machine that will act as the event log forwarder.\nYou can skip this step if you have already installed the Microsoft agent for Windows""}, {""title"": ""2. Create a Data Collection Rule (DCR)"", ""description"": ""Start collecting logs from the **Semperis DSP Management Server** .\n\n1. In the Azure portal, navigate to your **Log Analytics workspace**.\n2. In the left pane, click on **Configuration** and then **Data connectors**.\n3. Find and install the **the Windows Security Events via AMA** connector.\n4. Click on **Open connector** and then on **Create data collection rule**.\n5. 
Configure the DCR with the necessary details, such as the log sources and the destination workspace."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Semperis DSP Management Server"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""**Configure Common Event Format via AMA connector**"", ""description"": ""Collect syslog messages send from your **Semperis DSP Management Server** .""}, {""title"": ""1. Install the Azure Monitor Agent (AMA)"", ""description"": ""Install the AMA on the Linux machine that will act as the log forwarder. This machine will collect and forward CEF logs to Microsoft Sentinel.\nYou can skip this step if you have already installed the Microsoft agent for Linux""}, {""title"": ""2. Create a Data Collection Rule (DCR)"", ""description"": ""Start collecting logs from the **Semperis DSP Management Server** .\n\n1. In the Azure portal, navigate to your **Log Analytics workspace**.\n2. In the left pane, click on **Configuration** and then **Data connectors**.\n3. Find and install the **the Common Event Format via AMA** connector.\n4. Click on **Open connector** and then on **Create data collection rule**.\n5. Configure the DCR with the necessary details, such as the log sources and the destination workspace."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Semperis DSP Management Server"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. 
Configure sending CEF logs on your Semperis DSP Management Server"", ""description"": ""Configure your **Semperis DSP Management Server** to send CEF logs to the Linux machine where the AMA is installed. This involves setting the destination IP address and port for the CEF logs""}, {""title"": """", ""description"": ""> You should now be able to receive logs in the *Windows event log* table and *common log* table, log data can be parsed using the **dsp_parser()** function, used by all query samples, workbooks and analytic templates.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Semperis%20Directory%20Services%20Protector/Data%20Connectors/SemperisDSP-connector.json","true" +"SenservaPro_CL","SenservaPro","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SenservaPro","senservallc","senservapro4sentinel","2022-06-01","","","Senserva","Partner","https://www.senserva.com/contact/","","domains","SenservaPro","Senserva","SenservaPro (Preview)","The SenservaPro data connector provides a viewing experience for your SenservaPro scanning logs. View dashboards of your data, use queries to hunt & explore, and create custom alerts.","[{""title"": ""1. 
Setup the data connection"", ""description"": ""Visit [Senserva Setup](https://www.senserva.com/senserva-microsoft-sentinel-edition-setup/) for information on setting up the Senserva data connection, support, or any other questions. The Senserva installation will configure a Log Analytics Workspace for output. Deploy Microsoft Sentinel onto the configured Log Analytics Workspace to finish the data connection setup by following [this onboarding guide.](https://docs.microsoft.com/azure/sentinel/quickstart-onboard)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SenservaPro/Data%20Connectors/SenservaPro.json","true" +"SentinelOne_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOne","SentinelOne","SentinelOne","The [SentinelOne](https://www.sentinelone.com/) data connector provides the capability to ingest common SentinelOne server objects such as Threats, Agents, Applications, Activities, Policies, Groups, and more events into Microsoft Sentinel through the REST API. Refer to API documentation: `https://.sentinelone.net/api-doc/overview` for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SentinelOne API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SentinelOne and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Parsers/SentinelOne.txt). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SentinelOne API**\n\n Follow the instructions to obtain the credentials.\n\n1. Log in to the SentinelOne Management Console with Admin user credentials.\n2. In the Management Console, click **Settings**.\n3. In the **SETTINGS** view, click **USERS**\n4. Click **New User**.\n5. Enter the information for the new console user.\n5. In Role, select **Admin**.\n6. Click **SAVE**\n7. Save credentials of the new user for using in the data connector.""}, {""title"": """", ""description"": ""**NOTE :-** Admin access can be delegated using custom roles. 
Please review SentinelOne [documentation](https://www.sentinelone.com/blog/feature-spotlight-fully-custom-role-based-access-control/) to learn more about custom RBAC.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SentinelOne data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SentinelOne Audit data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SentinelOneAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-SentinelOneAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **SentinelOneAPIToken**, **SentinelOneUrl** `(https://.sentinelone.net)` and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SentinelOne Reports data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SentinelOneAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SOneXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n 1. In the Function App, select the Function App Name and select **Configuration**.\n\n 2. 
In the **Application settings** tab, select ** New application setting**.\n\n 3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\t SentinelOneAPIToken\n\t\t SentinelOneUrl\n\t\t WorkspaceID\n\t\t WorkspaceKey\n\t\t logAnalyticsUri (optional)\n\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n\n 4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SentinelOneAPIToken** is required. 
See the documentation to learn more about API on the `https://.sentinelone.net/api-doc/overview`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_API_FunctionApp.json","true" +"SentinelOneActivities_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SentinelOne API \n Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve SentinelOne Management URL\n 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**] copy the URL link above without the URL path.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**]\n 2.3. 
In [**Settings**] view click on [**USERS**].\n 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**].\n 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**].\n 2.6. Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**]""}}, {""parameters"": {""label"": ""SentinelOne Management URL"", ""placeholder"": ""https://example.sentinelone.net/"", ""type"": ""text"", ""name"": ""managementUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""securestring"", ""name"": ""apitoken""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" +"SentinelOneAgents_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SentinelOne API \n Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve SentinelOne Management URL\n 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**] copy the URL link above without the URL path.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**]\n 2.3. In [**Settings**] view click on [**USERS**].\n 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**].\n 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**].\n 2.6. 
Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**]""}}, {""parameters"": {""label"": ""SentinelOne Management URL"", ""placeholder"": ""https://example.sentinelone.net/"", ""type"": ""text"", ""name"": ""managementUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""securestring"", ""name"": ""apitoken""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" +"SentinelOneAlerts_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SentinelOne API \n Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve SentinelOne Management URL\n 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**] copy the URL link above without the URL path.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**]\n 2.3. In [**Settings**] view click on [**USERS**].\n 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**].\n 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**].\n 2.6. 
Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**]""}}, {""parameters"": {""label"": ""SentinelOne Management URL"", ""placeholder"": ""https://example.sentinelone.net/"", ""type"": ""text"", ""name"": ""managementUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""securestring"", ""name"": ""apitoken""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" +"SentinelOneGroups_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SentinelOne API \n Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve SentinelOne Management URL\n 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**] copy the URL link above without the URL path.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**]\n 2.3. In [**Settings**] view click on [**USERS**].\n 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**].\n 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**].\n 2.6. 
Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**]""}}, {""parameters"": {""label"": ""SentinelOne Management URL"", ""placeholder"": ""https://example.sentinelone.net/"", ""type"": ""text"", ""name"": ""managementUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""securestring"", ""name"": ""apitoken""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" +"SentinelOneThreats_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SentinelOne API \n Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve SentinelOne Management URL\n 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**] copy the URL link above without the URL path.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**]\n 2.3. In [**Settings**] view click on [**USERS**].\n 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**].\n 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**].\n 2.6. 
Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**]""}}, {""parameters"": {""label"": ""SentinelOne Management URL"", ""placeholder"": ""https://example.sentinelone.net/"", ""type"": ""text"", ""name"": ""managementUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""securestring"", ""name"": ""apitoken""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" +"","SentinelSOARessentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelSOARessentials","azuresentinel","azure-sentinel-solution-sentinelsoaressentials","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"SeraphicWebSecurity_CL","SeraphicSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SeraphicSecurity","seraphicalgorithmsltd1616061090462","seraphic-security-sentinel","2023-07-31","2023-07-31","","Seraphic Security","Partner","https://seraphicsecurity.com","","domains","SeraphicWebSecurity","Seraphic","Seraphic Web Security","The Seraphic Web Security data connector provides the capability to ingest [Seraphic Web Security](https://seraphicsecurity.com/) events and alerts into Microsoft Sentinel.","[{""title"": ""Connect Seraphic Web Security"", ""description"": ""Please insert 
the integration name, the Seraphic integration URL and your workspace name for Microsoft Sentinel:"", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Integration Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{subscriptionId}}""}, {""displayText"": ""Integration URL"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{endpoint}}""}, {""displayText"": ""Workspace Name - Log Analytics"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{workspaceName}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Seraphic API key"", ""description"": ""API key for Microsoft Sentinel connected to your Seraphic Web Security tenant. 
To get this API key for your tenant - [read this documentation](https://constellation.seraphicsecurity.com/integrations/microsoft_sentinel/Guidance/MicrosoftSentinel-IntegrationGuide-230822.pdf).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SeraphicSecurity/Data%20Connectors/SeraphicSecurityConnector.json","true" +"","ServiceNow TISC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ServiceNow%20TISC","servicenow1594831756316","sentinel-solution-tisc","2025-01-15","2025-01-15","","ServiceNow","Partner","https://support.servicenow.com/now","","domains","","","","","","","","false" +"","Servicenow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Servicenow","azuresentinel","azure-sentinel-solution-servicenow","2022-09-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"Sevco_Devices_CL","SevcoSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SevcoSecurity","azuresentinel","azure-sentinel-solution-sevcosecurity","2023-05-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SevcoDevices","Sevco Security","Sevco Platform - Devices","The Sevco Platform - Devices connector allows you to easily connect your Sevco Device Assets with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s assets and improves your security operation capabilities.

[For more information >​](https://docs.sev.co/docs/microsoft-sentinel-inventory)","[{""title"": ""Configure and connect to Sevco"", ""description"": ""The Sevco Platform can integrate with and export assets directly to Microsoft Sentinel..\u200b\n\n1. Go to [Sevco - Microsoft Sentinel Integration](https://docs.sev.co/docs/microsoft-sentinel-inventory), and follow the instructions, using the parameters below to set up the connection:.\n\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SevcoSecurity/Data%20Connectors/Connector_SevcoSecurity.json","true" +"","ShadowByte Aria","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ShadowByte%20Aria","shadowbyte1641237427416","ariasent1","2021-12-24","","","Shadowbyte","Partner","https://shadowbyte.com/products/aria/","","domains","","","","","","","","false" +"","Shodan","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Shodan","azuresentinel","azure-sentinel-solution-shodan","2023-02-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"CommonSecurityLog","Silverfort","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Silverfort","silverfort","microsoft-sentinel-solution-silverfort","2024-09-01","","","Silverfort","Partner","https://www.silverfort.com/customer-success/#support","","domains","SilverfortAma","Silverfort","Silverfort Admin Console","The [Silverfort](https://silverfort.com) ITDR Admin Console connector solution allows ingestion of Silverfort events and logging into Microsoft Sentinel.
Silverfort provides syslog based events and logging using Common Event Format (CEF). By forwarding your Silverfort ITDR Admin Console CEF data into Microsoft Sentinel, you can take advantage of Sentinels's search & correlation, alerting, and threat intelligence enrichment on Silverfort data.
Please contact Silverfort or consult the Silverfort documentation for more information.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Silverfort/Data%20Connectors/SilverfortAma.json","true" +"SlackAuditNativePoller_CL","SlackAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit","azuresentinel","azure-sentinel-solution-slackaudit","2021-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SlackAudit","Slack","Slack","The [Slack](https://slack.com) data connector provides the capability to ingest [Slack Audit Records](https://api.slack.com/admins/audit-logs) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs#the_audit_event) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. This data connector uses Microsoft Sentinel native polling capability.","[{""title"": ""Connect Slack to Microsoft Sentinel"", ""description"": ""Enable Slack audit Logs."", ""instructions"": [{""parameters"": {""enable"": ""true""}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Slack API credentials"", ""description"": ""**SlackAPIBearerToken** is required for REST API. [See the documentation to learn more about API](https://api.slack.com/web#authentication). Check all [requirements and follow the instructions](https://api.slack.com/web#authentication) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit/Data%20Connectors/SlackNativePollerConnector/azuredeploy_Slack_native_poller_connector.json","true" +"SlackAudit_CL","SlackAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit","azuresentinel","azure-sentinel-solution-slackaudit","2021-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SlackAuditAPI","Slack","[DEPRECATED] Slack Audit","The [Slack](https://slack.com) Audit data connector provides the capability to ingest [Slack Audit Records](https://api.slack.com/admins/audit-logs) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs#the_audit_event) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Slack REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-SlackAuditAPI-parser) to create the Kusto functions alias, **SlackAudit**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Slack API**\n\n [Follow the instructions](https://api.slack.com/web#authentication) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Slack Audit data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Slack Audit data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SlackAuditAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **SlackAPIBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Slack Audit data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-SlackAuditAPI-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSlackAPIBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SlackAPIBearerToken** is required for REST API. [See the documentation to learn more about API](https://api.slack.com/web#authentication). 
Check all [requirements and follow the instructions](https://api.slack.com/web#authentication) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit/Data%20Connectors/SlackAudit_API_FunctionApp.json","true" +"SlackAuditV2_CL","SlackAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit","azuresentinel","azure-sentinel-solution-slackaudit","2021-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SlackAuditLogsCCPDefinition","Microsoft","SlackAudit (via Codeless Connector Framework)","The SlackAudit data connector provides the capability to ingest [Slack Audit logs](https://api.slack.com/admins/audit-logs) into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs-call) for more information.","[{""description"": ""To ingest data from SlackAudit to Microsoft Sentinel, you have to click on Add Domain button below then you get a pop up to fill the details, provide the required information and click on Connect. 
You can see the usernames, actions connected in the grid.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""UserName"", ""columnValue"": ""properties.addOnAttributes.UserName""}, {""columnName"": ""Actions"", ""columnValue"": ""properties.addOnAttributes.Actions""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""UserName"", ""placeholder"": ""Enter your User Name"", ""name"": ""UserName"", ""type"": ""text"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SlackAudit API Key"", ""placeholder"": ""Enter your API KEY"", ""name"": ""apiKey"", ""type"": ""password"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SlackAudit Action Type"", ""placeholder"": ""Enter the Action Type"", ""name"": ""action"", ""type"": ""string"", ""required"": true}}]}]}}], ""title"": ""Connect SlackAudit to Microsoft Sentinel\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""UserName, SlackAudit API Key & Action Type"", ""description"": ""To Generate the Access Token, create a new application in Slack, then add necessary scopes and configure the redirect URL. 
For detailed instructions on generating the access token, user name and action name limit, refer the [link](https://github.com/v-gsrihitha/v-gsrihitha/blob/main/SlackAudit/Readme.md).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit/Data%20Connectors/SlackAuditLog_CCP/SlackAuditLog_ConnectorDefinition.json","true" +"","SlashNext","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlashNext","slashnext1598548183597","slashnext-weblog-assessment-for-microsoft-sentinel","2022-08-12","2022-08-12","","SlashNext","Partner","https://support@slashnext.com","","domains","","","","","","","","false" +"","SlashNext SIEM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlashNext%20SIEM","slashnext1598548183597","slashnext-security-events-for-microsoft-sentinel","2023-05-26","2023-06-16","","SlashNext","Partner","https://slashnext.com/support","","domains","","","","","","","","false" +"Snowflake_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeDataConnector","Snowflake","[DEPRECATED] Snowflake","The Snowflake data connector provides the capability to ingest Snowflake [login logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history.html) and [query logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history.html) into Microsoft Sentinel using the Snowflake Python Connector. Refer to [Snowflake documentation](https://docs.snowflake.com/en/user-guide/python-connector.html) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Snowflake**](https://aka.ms/sentinel-SnowflakeDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Creating user in Snowflake**\n\nTo query data from Snowflake you need a user that is assigned to a role with sufficient privileges and a virtual warehouse cluster. The initial size of this cluster will be set to small but if it is insufficient, the cluster size can be increased as necessary.\n\n1. Enter the Snowflake console.\n2. Switch role to SECURITYADMIN and [create a new role](https://docs.snowflake.com/en/sql-reference/sql/create-role.html):\n```\nUSE ROLE SECURITYADMIN;\nCREATE OR REPLACE ROLE EXAMPLE_ROLE_NAME;```\n3. 
Switch role to SYSADMIN and [create warehouse](https://docs.snowflake.com/en/sql-reference/sql/create-warehouse.html) and [grand access](https://docs.snowflake.com/en/sql-reference/sql/grant-privilege.html) to it:\n```\nUSE ROLE SYSADMIN;\nCREATE OR REPLACE WAREHOUSE EXAMPLE_WAREHOUSE_NAME\n WAREHOUSE_SIZE = 'SMALL' \n AUTO_SUSPEND = 5\n AUTO_RESUME = true\n INITIALLY_SUSPENDED = true;\nGRANT USAGE, OPERATE ON WAREHOUSE EXAMPLE_WAREHOUSE_NAME TO ROLE EXAMPLE_ROLE_NAME;```\n4. Switch role to SECURITYADMIN and [create a new user](https://docs.snowflake.com/en/sql-reference/sql/create-user.html):\n```\nUSE ROLE SECURITYADMIN;\nCREATE OR REPLACE USER EXAMPLE_USER_NAME\n PASSWORD = 'example_password'\n DEFAULT_ROLE = EXAMPLE_ROLE_NAME\n DEFAULT_WAREHOUSE = EXAMPLE_WAREHOUSE_NAME\n;```\n5. Switch role to ACCOUNTADMIN and [grant access to snowflake database](https://docs.snowflake.com/en/sql-reference/account-usage.html#enabling-account-usage-for-other-roles) for role.\n```\nUSE ROLE ACCOUNTADMIN;\nGRANT IMPORTED PRIVILEGES ON DATABASE SNOWFLAKE TO ROLE EXAMPLE_ROLE_NAME;```\n6. 
Switch role to SECURITYADMIN and [assign role](https://docs.snowflake.com/en/sql-reference/sql/grant-role.html) to user:\n```\nUSE ROLE SECURITYADMIN;\nGRANT ROLE EXAMPLE_ROLE_NAME TO USER EXAMPLE_USER_NAME;```\n\n>**IMPORTANT:** Save user and API password created during this step as they will be used during deployment step.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Snowflake credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SnowflakeDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Snowflake Account Identifier**, **Snowflake User**, **Snowflake Password**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-SnowflakeDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration. \n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSNOWFLAKE_ACCOUNT\n\t\tSNOWFLAKE_USER\n\t\tSNOWFLAKE_PASSWORD\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Snowflake Credentials"", ""description"": ""**Snowflake Account Identifier**, **Snowflake User** and **Snowflake Password** are required for connection. See the documentation to learn more about [Snowflake Account Identifier](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html#). 
Instructions on how to create user for this connector you can find below.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Snowflake_API_FunctionApp.json","true" +"SnowflakeLoad_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": 
""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"SnowflakeLogin_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": 
""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"SnowflakeMaterializedView_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake 
Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"SnowflakeQuery_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" 
+"SnowflakeRoleGrant_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": 
""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"SnowflakeRoles_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": 
""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"SnowflakeTableStorageMetrics_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake 
Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"SnowflakeTables_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" 
+"SnowflakeUserGrant_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": 
""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"SnowflakeUsers_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": 
""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"CommonSecurityLog","SonicWall Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall","sonicwall-inc","sonicwall-networksecurity-azure-sentinal","2022-05-06","","","SonicWall","Partner","https://www.sonicwall.com/support/","","domains","SonicWallFirewall","SonicWall","[Deprecated] SonicWall Firewall via Legacy Agent","Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by SonicWall to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward SonicWall Firewall Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your SonicWall Firewall to send Syslog messages in CEF format to the proxy machine. Make sure you send the logs to port 514 TCP on the machine's IP address.\n\n Follow Instructions . Then Make sure you select local use 4 as the facility. Then select ArcSight as the Syslog format.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall/Data%20Connectors/SonicwallFirewall.json","true" +"CommonSecurityLog","SonicWall Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall","sonicwall-inc","sonicwall-networksecurity-azure-sentinal","2022-05-06","","","SonicWall","Partner","https://www.sonicwall.com/support/","","domains","SonicWallFirewallAma","SonicWall","[Deprecated] SonicWall Firewall via AMA","Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by SonicWall to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. 
Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward SonicWall Firewall Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your SonicWall Firewall to send Syslog messages in CEF format to the proxy machine. Make sure you send the logs to port 514 TCP on the machine's IP address.\n\n Follow Instructions . Then Make sure you select local use 4 as the facility. Then select ArcSight as the Syslog format.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall/Data%20Connectors/template_SonicwallFirewallAMA.json","true" +"Sonrai_Tickets_CL","SonraiSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonraiSecurity","sonraisecurityllc1584373214489","sonrai_sentinel_offer","2021-10-18","","","Sonrai","Partner","","","domains","SonraiDataConnector","Sonrai","Sonrai Data Connector","Use this data connector to integrate with Sonrai Security and get Sonrai tickets sent directly to Microsoft Sentinel.","[{""title"": ""Sonrai Security Data Connector"", ""description"": ""1. Navigate to Sonrai Security dashboard.\n2. On the bottom left panel, click on integrations.\n3. Select Microsoft Sentinel from the list of available Integrations.\n4. Fill in the form using the information provided below."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonraiSecurity/Data%20Connectors/Connector_REST_API_Sonrai.json","true" +"SophosCloudOptix_CL","Sophos Cloud Optix","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Cloud%20Optix","sophos","sophos_cloud_optix_mss","2022-05-02","","","Sophos","Partner","https://www.sophos.com/en-us/support","","domains","SophosCloudOptix","Sophos","Sophos Cloud Optix","The [Sophos Cloud Optix](https://www.sophos.com/products/cloud-optix.aspx) connector allows you to easily connect your Sophos Cloud Optix logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's cloud security and compliance posture and improves your cloud security operation capabilities.","[{""title"": ""1. Get the Workspace ID and the Primary Key"", ""description"": ""Copy the Workspace ID and Primary Key for your workspace.\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""2. Configure the Sophos Cloud Optix Integration"", ""description"": ""In Sophos Cloud Optix go to [Settings->Integrations->Microsoft Sentinel](https://optix.sophos.com/#/integrations/sentinel) and enter the Workspace ID and Primary Key copied in Step 1.\n""}, {""title"": ""3. Select Alert Levels"", ""description"": ""In Alert Levels, select which Sophos Cloud Optix alerts you want to send to Microsoft Sentinel.\n""}, {""title"": ""4. 
Turn on the integration"", ""description"": ""To turn on the integration, select Enable, and then click Save.\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Cloud%20Optix/Data%20Connectors/Connector_REST_API_SophosCloudOptix.json","true" +"SophosEP_CL","Sophos Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-sophosep","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosEP","Sophos","Sophos Endpoint Protection","The [Sophos Endpoint Protection](https://www.sophos.com/en-us/products/endpoint-antivirus.aspx) data connector provides the capability to ingest [Sophos events](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/common/concepts/Events.html) into Microsoft Sentinel. Refer to [Sophos Central Admin documentation](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/Logs.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Sophos Central APIs to pull its logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**SophosEPEvent**](https://aka.ms/sentinel-SophosEP-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Sophos Central API**\n\n Follow the instructions to obtain the credentials.\n\n1. In Sophos Central Admin, go to **Global Settings > API Token Management**.\n2. To create a new token, click **Add token** from the top-right corner of the screen.\n3. Select a **token name** and click **Save**. The **API Token Summary** for this token is displayed.\n4. 
Click **Copy** to copy your **API Access URL + Headers** from the **API Token Summary** section into your clipboard.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Sophos Endpoint Protection data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Sophos Endpoint Protection data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SophosEP-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **Sophos API Access URL and Headers**, **AzureSentinelWorkspaceId**, **AzureSentinelSharedKey**. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Sophos Endpoint Protection data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SophosEP-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSOPHOS_TOKEN\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**API token** is required. 
[See the documentation to learn more about API token](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/ep_ApiTokenManagement.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection/Data%20Connectors/SophosEP_API_FunctionApp.json","true" +"SophosEPAlerts_CL","Sophos Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-sophosep","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosEndpointProtectionCCPDefinition","Microsoft","Sophos Endpoint Protection (using REST API)","The [Sophos Endpoint Protection](https://www.sophos.com/en-us/products/endpoint-antivirus.aspx) data connector provides the capability to ingest [Sophos events](https://developer.sophos.com/docs/siem-v1/1/routes/events/get) and [Sophos alerts](https://developer.sophos.com/docs/siem-v1/1/routes/alerts/get) into Microsoft Sentinel. Refer to [Sophos Central Admin documentation](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/Logs.html) for more information.","[{""description"": ""Follow [Sophos instructions](https://developer.sophos.com/getting-started-tenant) to create a service principal with access to the Sophos API. 
It will need the Service Principal ReadOnly role.\n Through those instructions, you should get the Client ID, Client Secret, Tenant ID and data region.\n Fill the form below with that information."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Sophos Tenant ID"", ""placeholder"": ""Sophos Tenant ID"", ""type"": ""text"", ""name"": ""sophosTenantId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Sophos Tenant Data Region"", ""placeholder"": ""eu01, eu02, us01, us02 or us03"", ""type"": ""text"", ""name"": ""sophosRegion""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""name"", ""columnName"": ""Name""}, {""columnValue"": ""id"", ""columnName"": ""ID""}]}}], ""title"": ""Connect to Sophos Endpoint Protection API to start collecting event and alert logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Sophos Endpoint Protection API access"", ""description"": ""Access to the Sophos Endpoint Protection API through a service principal is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection/Data%20Connectors/SophosEP_ccp/SophosEP_DataConnectorDefinition.json","true" +"SophosEPEvents_CL","Sophos Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-sophosep","2021-07-07","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosEndpointProtectionCCPDefinition","Microsoft","Sophos Endpoint Protection (using REST API)","The [Sophos Endpoint Protection](https://www.sophos.com/en-us/products/endpoint-antivirus.aspx) data connector provides the capability to ingest [Sophos events](https://developer.sophos.com/docs/siem-v1/1/routes/events/get) and [Sophos alerts](https://developer.sophos.com/docs/siem-v1/1/routes/alerts/get) into Microsoft Sentinel. Refer to [Sophos Central Admin documentation](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/Logs.html) for more information.","[{""description"": ""Follow [Sophos instructions](https://developer.sophos.com/getting-started-tenant) to create a service principal with access to the Sophos API. It will need the Service Principal ReadOnly role.\n Through those instructions, you should get the Client ID, Client Secret, Tenant ID and data region.\n Fill the form below with that information."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Sophos Tenant ID"", ""placeholder"": ""Sophos Tenant ID"", ""type"": ""text"", ""name"": ""sophosTenantId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Sophos Tenant Data Region"", ""placeholder"": ""eu01, eu02, us01, us02 or us03"", ""type"": ""text"", ""name"": ""sophosRegion""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""name"", ""columnName"": ""Name""}, {""columnValue"": ""id"", ""columnName"": ""ID""}]}}], ""title"": ""Connect to Sophos Endpoint Protection API to start collecting event and alert logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", 
""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Sophos Endpoint Protection API access"", ""description"": ""Access to the Sophos Endpoint Protection API through a service principal is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection/Data%20Connectors/SophosEP_ccp/SophosEP_DataConnectorDefinition.json","true" +"Syslog","Sophos XG Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20XG%20Firewall","azuresentinel","azure-sentinel-solution-sophosxgfirewall","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosXGFirewall","Sophos","[Deprecated] Sophos XG Firewall","The [Sophos XG Firewall](https://www.sophos.com/products/next-gen-firewall.aspx) allows you to easily connect your Sophos XG Firewall logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Sophos XG Firewall with Microsoft Sentinel provides more visibility into your organization's firewall traffic and will enhance security monitoring capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Sophos XG Firewall and load the function code or click [here](https://aka.ms/sentinel-SophosXG-parser), on the second line of the query, enter the hostname(s) of your Sophos XG Firewall device(s) and any other unique identifiers for the logstream. 
The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the Sophos XG Firewall"", ""description"": ""[Follow these instructions](https://doc.sophos.com/nsg/sophos-firewall/20.0/Help/en-us/webhelp/onlinehelp/AdministratorHelp/SystemServices/LogSettings/SyslogServerAdd/index.html) to enable syslog streaming. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Sophos XG Firewall"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20XG%20Firewall/Data%20Connectors/Connector_Syslog_SophosXGFirewall.json","true" +"","SpyCloud Enterprise Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SpyCloud%20Enterprise%20Protection","spycloudinc1680448518850","azure-sentinel-solution-spycloudenterprise","2023-09-09","","","Spycloud","Partner","https://portal.spycloud.com","","domains","","","","","","","","false" +"secRMM_CL","Squadra Technologies SecRmm","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Squadra%20Technologies%20SecRmm","squadratechnologies","squadra_technologies_secrmm_mss","2022-05-09","","","Squadra Technologies","Partner","https://www.squadratechnologies.com/Contact.aspx","","domains","SquadraTechnologiesSecRMM","Squadra Technologies","Squadra Technologies secRMM","Use the Squadra Technologies secRMM Data Connector to push USB removable storage security event data into Microsoft Sentinel Log Analytics.","[{""title"": """", ""description"": ""Follow the step-by-step instructions provided in the [Squadra Technologies configuration guide for Azure Sentinel](https://www.squadratechnologies.com/StaticContent/ProductDownload/secRMM/9.11.0.0/secRMMAzureSentinelAdministratorGuide.pdf)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": 
""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Squadra%20Technologies%20SecRmm/Data%20Connectors/SquadraTechnologiesSecRMM.json","true" +"SquidProxy_CL","SquidProxy","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SquidProxy","azuresentinel","azure-sentinel-solution-squidproxy","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SquidProxy","Squid","[Deprecated] Squid Proxy","The [Squid Proxy](http://www.squid-cache.org/) connector allows you to easily connect your Squid Proxy logs with Microsoft Sentinel. This gives you more insight into your organization's network proxy traffic and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Squid Proxy and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SquidProxy/Parsers/SquidProxy.txt), on the second line of the query, enter the hostname(s) of your SquidProxy device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Squid Proxy server where the logs are generated.\n\n> Logs from Squid Proxy deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": 
""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Data**, select **Custom Logs** and click **Add+**\n3. Click **Browse** to upload a sample of a Squid Proxy log file(e.g. access.log or cache.log). Then, click **Next >**\n4. Select **New line** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to Squid Proxy logs. Default paths are: \n - **Windows** directory: `C:\\Squid\\var\\log\\squid\\*.log`\n - **Linux** Directory: `/var/log/squid/*.log` \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **SquidProxy_CL** as the custom log Name and click **Done**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SquidProxy/Data%20Connectors/Connector_CustomLog_SquidProxy.json","true" +"StyxViewAlerts_CL","Styx Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Styx%20Intelligence","styx_intelligence","microsoft-sentinel-solution-styxintelligence","2025-02-07","","","Styx Intelligence","Partner","https://www.styxintel.com/contact-us/","","domains","StyxViewEndpointConnectorDefinition","Styx Intelligence","StyxView Alerts (via Codeless Connector Platform)","The [StyxView Alerts](https://styxintel.com/) data connector enables seamless integration between the StyxView Alerts platform and Microsoft Sentinel. This connector ingests alert data from the StyxView Alerts API, allowing organizations to centralize and correlate actionable threat intelligence directly within their Microsoft Sentinel workspace.","[{""description"": ""Contact Styx Intelligence Support (support.team@styxintel.com) to get access to an API key."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""APIKey""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect to StyxView Alerts API to start collecting alert logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""StyxView Alert 
API access"", ""description"": ""Access to the StyxView Alerts API through an API key is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Styx%20Intelligence/Data%20Connectors/Alerts/StyxView%20Alerts_ConnectorDefinition.json","true" +"Syslog","Symantec Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-symantecendpointprotection","2022-07-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SymantecEndpointProtection","Broadcom","[Deprecated] Symantec Endpoint Protection","The [Broadcom Symantec Endpoint Protection (SEP)](https://www.broadcom.com/products/cyber-security/endpoint/end-user/enterprise) connector allows you to easily connect your SEP logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Symantec Endpoint Protection and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Endpoint%20Protection/Parsers/SymantecEndpointProtection.yaml), on the second line of the query, enter the hostname(s) of your SymantecEndpointProtection device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the Symantec Endpoint Protection"", ""description"": ""[Follow these instructions](https://techdocs.broadcom.com/us/en/symantec-security-software/endpoint-security-and-management/endpoint-protection/all/Monitoring-Reporting-and-Enforcing-Compliance/viewing-logs-v7522439-d37e464/exporting-data-to-a-syslog-server-v8442743-d15e1107.html) to configure the Symantec Endpoint Protection to forward syslog. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Symantec Endpoint Protection (SEP)"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Endpoint%20Protection/Data%20Connectors/Connector_Syslog_SymantecEndpointProtection.json","true" +"","Symantec Integrated Cyber Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Integrated%20Cyber%20Defense","azuresentinel","symantec_icdx_mss","2022-06-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Syslog","Symantec VIP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20VIP","azuresentinel","azure-sentinel-solution-symantecvip","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SymantecVIP","Symantec","[Deprecated] Symantec VIP","The [Symantec VIP](https://vip.symantec.com/) connector allows you to easily connect your Symantec VIP logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Symantec VIP and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20VIP/Parsers/SymantecVIP.yaml), on the second line of the query, enter the hostname(s) of your Symantec VIP device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure and connect the Symantec VIP"", ""description"": ""[Follow these instructions](https://aka.ms/sentinel-symantecvip-configurationsteps) to configure the Symantec VIP Enterprise Gateway to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Symantec VIP"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20VIP/Data%20Connectors/Connector_Syslog_SymantecVIP.json","true" +"Syslog","SymantecProxySG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SymantecProxySG","azuresentinel","azure-sentinel-symantec-proxysg","2021-05-25","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SymantecProxySG","Symantec","[Deprecated] Symantec ProxySG","The [Symantec ProxySG](https://www.broadcom.com/products/cyber-security/network/gateway/proxy-sg-and-advanced-secure-gateway) allows you to easily connect your Symantec ProxySG logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Symantec ProxySG with Microsoft Sentinel provides more visibility into your organization's network proxy traffic and will enhance security monitoring capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Symantec Proxy SG and load the function code or click [here](https://aka.ms/sentinel-SymantecProxySG-parser), on the second line of the query, enter the hostname(s) of your Symantec Proxy SG device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure and connect the Symantec ProxySG"", ""description"": "" \n 1. Log in to the Blue Coat Management Console .\n 2. Select Configuration > Access Logging > Formats.\n 3. Select New.\n 4. Enter a unique name in the Format Name field.\n 5. Click the radio button for **Custom format string** and paste the following string into the field.\n

1 $(date) $(time) $(time-taken) $(c-ip) $(cs-userdn) $(cs-auth-groups) $(x-exception-id) $(sc-filter-result) $(cs-categories) $(quot)$(cs(Referer))$(quot) $(sc-status) $(s-action) $(cs-method) $(quot)$(rs(Content-Type))$(quot) $(cs-uri-scheme) $(cs-host) $(cs-uri-port) $(cs-uri-path) $(cs-uri-query) $(cs-uri-extension) $(quot)$(cs(User-Agent))$(quot) $(s-ip) $(sr-bytes) $(rs-bytes) $(x-virus-id) $(x-bluecoat-application-name) $(x-bluecoat-application-operation) $(cs-uri-port) $(x-cs-client-ip-country) $(cs-threat-risk)

\n 6. Click the **OK** button. \n 7. Click the **Apply** button. \n 8. [Follow these instructions](https://knowledge.broadcom.com/external/article/166529/sending-access-logs-to-a-syslog-server.html) to enable syslog streaming of **Access** Logs. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Symantec ProxySG"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SymantecProxySG/Data%20Connectors/Connector_Syslog_SymantecProxySG.json","true" +"","Synack","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Synack","","","","","","","","","","","","","","","","","","false" +"Syslog","Syslog","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog","azuresentinel","azure-sentinel-solution-syslog","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Syslog","Microsoft","Syslog via Legacy Agent","Syslog is an event logging protocol that is common to Linux. Applications will send messages that may be stored on the local machine or delivered to a Syslog collector. When the Agent for Linux is installed, it configures the local Syslog daemon to forward messages to the agent. The agent then sends the message to the workspace.

[Learn more >](https://aka.ms/sysLogInfo)","[{""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""You can collect Syslog events from your local machine by installing the agent on it. You can also collect Syslog generated on a different source by running the installation script below on the local machine, where the agent is installed.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Select the link below to open your workspace **agents configuration**, and select the **Syslog** tab.\n2. Select **Add facility** and choose from the drop-down list of facilities. Repeat for all the facilities you want to add.\n3. Mark the check boxes for the desired severities for each facility.\n4. 
Click **Apply**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog/Data%20Connectors/template_Syslog.json","true" +"Syslog","Syslog","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog","azuresentinel","azure-sentinel-solution-syslog","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SyslogAma","Microsoft","Syslog via AMA","Syslog is an event logging protocol that is common to Linux. Applications will send messages that may be stored on the local machine or delivered to a Syslog collector. When the Agent for Linux is installed, it configures the local Syslog daemon to forward messages to the agent. The agent then sends the message to the workspace.

[Learn more >](https://aka.ms/sysLogInfo)","[{""title"": ""Enable data collection rule\u200b"", ""description"": ""You can collect Syslog events from your local machine by installing the agent on it. You can also collect Syslog generated on a different source by running the installation script below on the local machine, where the agent is installed.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""type"": ""SysLogAma""}]}, {""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 6}, ""type"": ""InstallAgent""}]}, {""title"": ""Run the following command to install and apply the Syslog collector:"", ""description"": ""> To collect logs generated on a different machine run this script on the machine where the agent is installed."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog/Data%20Connectors/template_SyslogAma.json","true" +"Talon_CL","Talon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Talon","taloncybersecurityltd1654088115170","talonconnector","2023-01-25","","","Talon Security","Partner","https://docs.console.talon-sec.com/","","domains","TalonLogs","Talon Security","Talon Insights","The Talon Security Logs connector allows you to easily connect your Talon events and audit logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation.","[{""title"": """", ""description"": ""Please note the values below and follow the instructions here to connect your Talon Security events and audit logs with Microsoft Sentinel."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Talon/Data%20Connectors/TalonLogs.json","true" +"","Tanium","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tanium","taniuminc1646329360287","tanium_sentinel_connector","2022-05-16","2025-07-03","","Tanium Inc.","Partner","https://support.tanium.com","","domains","","","","","","","","false" +"Cymru_Scout_Account_Usage_Data_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indictaors in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_Domain_Data_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Communications_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Details_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Fingerprints_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Foundation_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indictaors in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_OpenPorts_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indictaors in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_PDNS_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indictaors in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Summary_Certs_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indictaors in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Summary_Details_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indictaors in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Summary_Fingerprints_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indictaors in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Summary_OpenPorts_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Summary_PDNS_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_x509_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"","Teams","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Teams","sentinel4teams","sentinelforteams","2022-02-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Templates","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Templates","","","","","","","","","","","","","","","","","","false" +"Tenable_IE_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableIE","Tenable","Tenable Identity Exposure","Tenable Identity Exposure connector allows Indicators of Exposure, Indicators of Attack and trailflow logs to be ingested into Microsoft Sentinel.The different work books and data parsers allow you to more easily manipulate logs and monitor your Active Directory environment. The analytic templates allow you to automate responses regarding different events, exposures and attacks.","[{""title"": """", ""description"": "">This data connector depends on [afad_parser](https://aka.ms/sentinel-TenableApp-afad-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Configure the Syslog server"", ""description"": ""You will first need a **linux Syslog** server that TenableIE will send logs to. Typically you can run **rsyslog** on **Ubuntu**.\n You can then configure this server as you wish, but it is recommended to be able to output TenableIE logs in a separate file.\n\nConfigure rsyslog to accept logs from your TenableIE IP address.:\n\n```shell\nsudo -i\n\n# Set TenableIE source IP address\nexport TENABLE_IE_IP={Enter your IP address}\n\n# Create rsyslog configuration file\ncat > /etc/rsyslog.d/80-tenable.conf << EOF\n\\$ModLoad imudp\n\\$UDPServerRun 514\n\\$ModLoad imtcp\n\\$InputTCPServerRun 514\n\\$AllowedSender TCP, 127.0.0.1, $TENABLE_IE_IP\n\\$AllowedSender UDP, 127.0.0.1, $TENABLE_IE_IP\n\\$template MsgTemplate,\""%TIMESTAMP:::date-rfc3339% %HOSTNAME% %programname%[%procid%]:%msg%\\n\""\n\\$template remote-incoming-logs, \""/var/log/%PROGRAMNAME%.log\""\n*.* ?remote-incoming-logs;MsgTemplate\nEOF\n\n# Restart rsyslog\nsystemctl restart rsyslog\n```""}, {""title"": ""2. Install and onboard the Microsoft agent for Linux"", ""description"": ""The OMS agent will receive the TenableIE syslog events and publish it in Microsoft Sentinel :"", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. 
Check agent logs on the Syslog server"", ""description"": ""```shell\ntail -f /var/opt/microsoft/omsagent/log/omsagent.log\n```""}, {""title"": ""4. Configure TenableIE to send logs to your Syslog server"", ""description"": ""On your **TenableIE** portal, go to *System*, *Configuration* and then *Syslog*.\nFrom there you can create a new Syslog alert toward your Syslog server.\n\nOnce this is done, check that the logs are correctly gathered on your server in a separate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in TenableIE).\nIf you used the Quickstart template, the Syslog server will by default listen on port 514 in UDP and 1514 in TCP, without TLS.""}, {""title"": ""5. Configure the custom logs"", ""description"": ""Configure the agent to collect the logs.\n\n1. In Microsoft Sentinel, go to **Configuration** -> **Settings** -> **Workspace settings** -> **Custom logs**.\n2. Click **Add custom log**.\n3. Upload a sample TenableIE.log Syslog file from the **Linux** machine running the **Syslog** server and click **Next**\n4. Set the record delimiter to **New Line** if not already the case and click **Next**.\n5. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**. The default location of the file is `/var/log/TenableIE.log` if you have a Tenable version <3.1.0, you must also add this linux file location `/var/log/AlsidForAD.log`.\n6. Set the **Name** to *Tenable_IE_CL* (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *Tenable_IE_CL_CL*).\n7. Click **Next**, you will see a resume, then click **Create**\n"", ""instructions"": []}, {""title"": ""6. 
Enjoy !"", ""description"": ""> You should now be able to receive logs in the *Tenable_IE_CL* table, logs data can be parse using the **afad_parser()** function, used by all query samples, workbooks and analytic templates.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Access to TenableIE Configuration"", ""description"": ""Permissions to configure syslog alerting engine""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableIE/TenableIE.json","true" +"Tenable_VM_Asset_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. 
The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for TenableVM**\n\n [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. 
\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. \n3. Enter the below information : \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. 
**TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. **TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. 
**WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n\n\t s. **PyTenableUAVendor** - Value must be set to **Microsoft**. \n\n\t t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. \n\n\t u. **PyTenableUABuild** - Value must be set to **0.0.1**.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" +"Tenable_VM_Compliance_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for TenableVM**\n\n [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. \n3. Enter the below information : \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. **TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. 
**LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. 
**TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n\n\t s. **PyTenableUAVendor** - Value must be set to **Microsoft**. \n\n\t t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. \n\n\t u. **PyTenableUABuild** - Value must be set to **0.0.1**.\n3. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" +"Tenable_VM_Vuln_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for TenableVM**\n\n [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. \n3. Enter the below information : \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. **TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. 
**LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. 
**TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n\n\t s. **PyTenableUAVendor** - Value must be set to **Microsoft**. \n\n\t t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. \n\n\t u. **PyTenableUABuild** - Value must be set to **0.0.1**.\n3. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" +"Tenable_WAS_Asset_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for TenableVM**\n\n [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. \n3. Enter the below information : \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. **TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. 
**LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. 
**TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n\n\t s. **PyTenableUAVendor** - Value must be set to **Microsoft**. \n\n\t t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. \n\n\t u. **PyTenableUABuild** - Value must be set to **0.0.1**.\n3. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" +"Tenable_WAS_Vuln_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for TenableVM**\n\n [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. \n3. Enter the below information : \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. **TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. 
**LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. 
**TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n\n\t s. **PyTenableUAVendor** - Value must be set to **Microsoft**. \n\n\t t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. \n\n\t u. **PyTenableUABuild** - Value must be set to **0.0.1**.\n3. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" +"Tenable_ad_CL","TenableAD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableAD","","","","","","","","","","","Tenable.ad","Tenable","Tenable.ad","Tenable.ad connector allows to export Tenable.ad Indicators of Exposures, trailflow and Indicators of Attacks logs to Azure Sentinel in real time.
It provides a data parser to manipulate the logs more easily. The different workbooks ease your Active Directory monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks.","[{""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://raw.githubusercontent.com/tenable/Azure-Sentinel/Tenable.ad-connector/Solutions/TenableAD/Parsers/afad_parser.kql) to create the Kusto Functions alias, **afad_parser**"", ""instructions"": []}, {""title"": ""1. Configure the Syslog server"", ""description"": ""You will first need a **linux Syslog** server that Tenable.ad will send logs to. Typically you can run **rsyslog** on **Ubuntu**.\n You can then configure this server as you wish, but it is recommended to be able to output Tenable.ad logs in a separate file.\n\nConfigure rsyslog to accept logs from your Tenable.ad IP address.:\n\n```shell\nsudo -i\n\n# Set Tenable.ad source IP address\nexport TENABLE_AD_IP={Enter your IP address}\n\n# Create rsyslog configuration file\ncat > /etc/rsyslog.d/80-tenable.conf << EOF\n\\$ModLoad imudp\n\\$UDPServerRun 514\n\\$ModLoad imtcp\n\\$InputTCPServerRun 514\n\\$AllowedSender TCP, 127.0.0.1, $TENABLE_AD_IP\n\\$AllowedSender UDP, 127.0.0.1, $TENABLE_AD_IP\n\\$template MsgTemplate,\""%TIMESTAMP:::date-rfc3339% %HOSTNAME% %programname%[%procid%]:%msg%\\n\""\n\\$template remote-incoming-logs, \""/var/log/%PROGRAMNAME%.log\""\n*.* ?remote-incoming-logs;MsgTemplate\nEOF\n\n# Restart rsyslog\nsystemctl restart rsyslog\n```""}, {""title"": ""2. 
Install and onboard the Microsoft agent for Linux"", ""description"": ""The OMS agent will receive the Tenable.ad syslog events and publish it in Sentinel :"", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. Check agent logs on the Syslog server"", ""description"": ""```shell\ntail -f /var/opt/microsoft/omsagent/log/omsagent.log\n```""}, {""title"": ""4. Configure Tenable.ad to send logs to your Syslog server"", ""description"": ""On your **Tenable.ad** portal, go to *System*, *Configuration* and then *Syslog*.\nFrom there you can create a new Syslog alert toward your Syslog server.\n\nOnce this is done, check that the logs are correctly gathered on your server in a separate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in Tenable.ad).\nIf you used the Quickstart template, the Syslog server will by default listen on port 514 in UDP and 1514 in TCP, without TLS.""}, {""title"": ""5. Configure the custom logs"", ""description"": ""Configure the agent to collect the logs.\n\n1. In Sentinel, go to **Configuration** -> **Settings** -> **Workspace settings** -> **Custom logs**.\n2. Click **Add custom log**.\n3. Upload a sample Tenable.ad.log Syslog file from the **Linux** machine running the **Syslog** server and click **Next**\n4. 
Set the record delimiter to **New Line** if not already the case and click **Next**.\n5. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**. The default location of the file is `/var/log/Tenable.ad.log` if you have a Tenable version <3.1.0, you must also add this linux file location `/var/log/AlsidForAD.log`.\n6. Set the **Name** to *Tenable_ad_CL* (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *Tenable_ad_CL_CL*).\n7. Click **Next**, you will see a resume, then click **Create**\n"", ""instructions"": []}, {""title"": ""6. Enjoy !"", ""description"": ""> You should now be able to receive logs in the *Tenable_ad_CL* table, logs data can be parse using the **afad_parser()** function, used by all query samples, workbooks and analytic templates.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Access to Tenable.ad Configuration"", ""description"": ""Permissions to configure syslog alerting engine""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableAD/Data%20Connectors/Tenable.ad.json","true" +"Tenable_IO_Assets_CL","TenableIO","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO","tenable","tenable-sentinel-integration","2022-06-01","","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableIOAPI","Tenable","Tenable.io Vulnerability Management","The [Tenable.io](https://www.tenable.com/products/tenable-io) data connector provides the capability to ingest Asset and Vulnerability data into Microsoft Sentinel through the REST API from the Tenable.io platform (Managed in the cloud). Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the Tenable.io API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk) and [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**Tenable.io parser for vulnerabilities**](https://aka.ms/sentinel-TenableIO-TenableIOVulnerabilities-parser) and a [**Tenable.io parser for assets**](https://aka.ms/sentinel-TenableIO-TenableIOAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for Tenable.io**\n\n [Follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Tenable.io Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableIO-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **TenableAccessKey** and **TenableSecretKey** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Tenable.io Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableIO-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. 
The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableIOXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTenableAccessKey\n\t\tTenableSecretKey\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). Check all [requirements and follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO/Data%20Connectors/TenableIO.json","true" +"Tenable_IO_Vuln_CL","TenableIO","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO","tenable","tenable-sentinel-integration","2022-06-01","","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableIOAPI","Tenable","Tenable.io Vulnerability Management","The [Tenable.io](https://www.tenable.com/products/tenable-io) data connector provides the capability to ingest Asset and Vulnerability data into Microsoft Sentinel through the REST API from the Tenable.io platform (Managed in the cloud). Refer to [API documentation](https://developer.tenable.com/reference) for more information. 
The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the Tenable.io API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk) and [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**Tenable.io parser for vulnerabilities**](https://aka.ms/sentinel-TenableIO-TenableIOVulnerabilities-parser) and a [**Tenable.io parser for assets**](https://aka.ms/sentinel-TenableIO-TenableIOAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for Tenable.io**\n\n [Follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) to obtain the required API credentials. 
\n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Tenable.io Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableIO-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **TenableAccessKey** and **TenableSecretKey** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Tenable.io Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableIO-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. 
Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableIOXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTenableAccessKey\n\t\tTenableSecretKey\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO/Data%20Connectors/TenableIO.json","true" +"","TestSolution","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TestSolution","","","","","","","","","","domains","","","","","","","","false" +"TheHive_CL","TheHive","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TheHive","azuresentinel","azure-sentinel-solution-thehive","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TheHiveProjectTheHive","TheHive Project","TheHive Project - TheHive","The [TheHive](http://thehive-project.org/) data connector provides the capability to ingest common TheHive events into Microsoft Sentinel through Webhooks. TheHive can notify external system of modification events (case creation, alert update, task assignment) in real time. When a change occurs in the TheHive, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://docs.thehive-project.org/thehive/legacy/thehive3/admin/webhooks/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**TheHive**](https://aka.ms/sentinel-TheHive-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the TheHive**\n\n Follow the [instructions](https://docs.thehive-project.org/thehive/installation-and-configuration/configuration/webhooks/) to configure Webhooks.\n\n1. Authentication method is *Bearer Auth*.\n2. Generate the **TheHiveBearerToken** according to your password policy.\n3. Setup Webhook notifications in the *application.conf* file including **TheHiveBearerToken** parameter.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the TheHive data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TheHive data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TheHive-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. 
\n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **TheHiveBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TheHive data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TheHive-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTheHiveBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Webhooks Credentials/permissions"", ""description"": ""**TheHiveBearerToken**, **Callback URL** are required for working Webhooks. 
See the documentation to learn more about [configuring Webhooks](https://docs.thehive-project.org/thehive/installation-and-configuration/configuration/webhooks/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TheHive/Data%20Connectors/TheHive_Webhooks_FunctionApp.json","true" +"TheomAlerts_CL","Theom","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Theom","theominc1667512729960","theom_sentinel","2022-11-04","","","Theom","Partner","https://www.theom.ai","","domains","Theom","Theom","Theom","Theom Data Connector enables organizations to connect their Theom environment to Microsoft Sentinel. This solution enables users to receive alerts on data security risks, create and enrich incidents, check statistics and trigger SOAR playbooks in Microsoft Sentinel","[{""title"": """", ""description"": ""1. In **Theom UI Console** click on **Manage -> Alerts** on the side bar.\n2. Select **Sentinel** tab.\n3. Click on **Active** button to enable the configuration.\n4. Enter `Primary` key as `Authorization Token`\n5. Enter `Endpoint URL` as `https://.ods.opinsights.azure.com/api/logs?api-version=2016-04-01`\n6. Click on `SAVE SETTINGS`\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Theom/Data%20Connectors/Theom.json","true" +"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","MicrosoftDefenderThreatIntelligence","Microsoft","Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. 
Threat indicators can include IP addresses, domains, URLs, and file hashes, etc.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""MicrosoftThreatIntelligence"", ""parameters"": {""connectorKind"": ""MicrosoftThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_MicrosoftDefenderThreatIntelligence.json","true" +"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PremiumMicrosoftDefenderForThreatIntelligence","Microsoft","Premium Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc. Note: This is a paid connector. 
To use and ingest data from it, please purchase the ""MDTI API Access"" SKU from the Partner Center.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""PremiumMicrosoftDefenderForThreatIntelligence"", ""parameters"": {""connectorKind"": ""PremiumMicrosoftDefenderForThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_PremiumMicrosoftDefenderThreatIntelligence.json","true" +"CommonSecurityLog","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either:"", ""description"": ""- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, and others.\n\n- Calling the Microsoft Graph Security API directly from another application.""}, {""title"": ""Follow These Steps to Connect your Threat Intelligence:"", ""description"": ""1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory.\n\n2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application.\n\n3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application.\n\n4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following:\n\n a. The application ID and secret you received when registering the app (step 1 above). \n\n b. Set \u201cMicrosoft Sentinel\u201d as the target.\n\n c. 
Set an action for each indicator - \u2018alert\u2019 is most relevant for Microsoft Sentinel use cases \n\nFor the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector).\n\nClick on \""Connect\"" below\n\n> Data from all regions will be sent to and stored in the workspace's region."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligence""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligence.json","true" +"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either:"", ""description"": ""- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, and others.\n\n- Calling the Microsoft Graph Security API directly from another application.""}, {""title"": ""Follow These Steps to Connect your Threat Intelligence:"", ""description"": ""1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory.\n\n2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application.\n\n3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application.\n\n4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following:\n\n a. The application ID and secret you received when registering the app (step 1 above). \n\n b. Set \u201cMicrosoft Sentinel\u201d as the target.\n\n c. 
Set an action for each indicator - \u2018alert\u2019 is most relevant for Microsoft Sentinel use cases \n\nFor the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector).\n\nClick on \""Connect\"" below\n\n> Data from all regions will be sent to and stored in the workspace's region."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligence""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligence.json","true" +"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxii","Microsoft","Threat intelligence - TAXII","Microsoft Sentinel integrates with TAXII 2.0 and 2.1 data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send the supported STIX object types from TAXII servers to Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2224105&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Configure TAXII servers to stream STIX 2.0 or 2.1 STIX objects to Microsoft Sentinel"", ""description"": ""You can connect your TAXII servers to Microsoft Sentinel using the built-in TAXII connector. For detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence---taxii-data-connector). \n\nEnter the following information and select Add to configure your TAXII server."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligenceTaxii""}]}]","{""customs"": [{""name"": ""TAXII Server"", ""description"": ""TAXII 2.0 or TAXII 2.1 Server URI and Collection ID.""}], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceTaxii.json","true" +"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. 
Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. \n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: \nFairfax: https://management.usgovcloudapi.net/.default \nMooncake: https://management.chinacloudapi.cn/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: \nFairfax: https://api.ti.sentinel.azure.us/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \nMooncake: https://api.ti.sentinel.azure.cn/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json","true" +"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. 
\n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: https://management.azure.com/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: https://api.ti.sentinel.azure.com/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceUploadIndicators.json","true" +"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","MicrosoftDefenderThreatIntelligence","Microsoft","Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. 
Threat indicators can include IP addresses, domains, URLs, and file hashes, etc.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""MicrosoftThreatIntelligence"", ""parameters"": {""connectorKind"": ""MicrosoftThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_MicrosoftDefenderThreatIntelligence.json","true" +"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","MicrosoftDefenderThreatIntelligence","Microsoft","Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. 
Threat indicators can include IP addresses, domains, URLs, and file hashes, etc.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""MicrosoftThreatIntelligence"", ""parameters"": {""connectorKind"": ""MicrosoftThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_MicrosoftDefenderThreatIntelligence.json","true" +"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PremiumMicrosoftDefenderForThreatIntelligence","Microsoft","Premium Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc. Note: This is a paid connector. 
To use and ingest data from it, please purchase the ""MDTI API Access"" SKU from the Partner Center.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""PremiumMicrosoftDefenderForThreatIntelligence"", ""parameters"": {""connectorKind"": ""PremiumMicrosoftDefenderForThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_PremiumMicrosoftDefenderThreatIntelligence.json","true" +"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PremiumMicrosoftDefenderForThreatIntelligence","Microsoft","Premium Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc. Note: This is a paid connector. 
To use and ingest data from it, please purchase the ""MDTI API Access"" SKU from the Partner Center.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""PremiumMicrosoftDefenderForThreatIntelligence"", ""parameters"": {""connectorKind"": ""PremiumMicrosoftDefenderForThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_PremiumMicrosoftDefenderThreatIntelligence.json","true" +"CommonSecurityLog","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either:"", ""description"": ""- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, and others.\n\n- Calling the Microsoft Graph Security API directly from another application.""}, {""title"": ""Follow These Steps to Connect your Threat Intelligence:"", ""description"": ""1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory.\n\n2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application.\n\n3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application.\n\n4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following:\n\n a. The application ID and secret you received when registering the app (step 1 above). \n\n b. Set \u201cMicrosoft Sentinel\u201d as the target.\n\n c. 
Set an action for each indicator - \u2018alert\u2019 is most relevant for Microsoft Sentinel use cases \n\nFor the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector).\n\nClick on \""Connect\"" below\n\n> Data from all regions will be sent to and stored in the workspace's region."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligence""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligence.json","true" +"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either:"", ""description"": ""- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, and others.\n\n- Calling the Microsoft Graph Security API directly from another application.""}, {""title"": ""Follow These Steps to Connect your Threat Intelligence:"", ""description"": ""1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory.\n\n2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application.\n\n3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application.\n\n4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following:\n\n a. The application ID and secret you received when registering the app (step 1 above). \n\n b. Set \u201cMicrosoft Sentinel\u201d as the target.\n\n c. 
Set an action for each indicator - \u2018alert\u2019 is most relevant for Microsoft Sentinel use cases \n\nFor the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector).\n\nClick on \""Connect\"" below\n\n> Data from all regions will be sent to and stored in the workspace's region."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligence""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligence.json","true" +"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either:"", ""description"": ""- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, and others.\n\n- Calling the Microsoft Graph Security API directly from another application.""}, {""title"": ""Follow These Steps to Connect your Threat Intelligence:"", ""description"": ""1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory.\n\n2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application.\n\n3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application.\n\n4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following:\n\n a. The application ID and secret you received when registering the app (step 1 above). \n\n b. Set \u201cMicrosoft Sentinel\u201d as the target.\n\n c. 
Set an action for each indicator - \u2018alert\u2019 is most relevant for Microsoft Sentinel use cases \n\nFor the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector).\n\nClick on \""Connect\"" below\n\n> Data from all regions will be sent to and stored in the workspace's region."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligence""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligence.json","true" +"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxii","Microsoft","Threat intelligence - TAXII","Microsoft Sentinel integrates with TAXII 2.0 and 2.1 data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send the supported STIX object types from TAXII servers to Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2224105&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Configure TAXII servers to stream STIX 2.0 or 2.1 STIX objects to Microsoft Sentinel"", ""description"": ""You can connect your TAXII servers to Microsoft Sentinel using the built-in TAXII connector. For detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence---taxii-data-connector). \n\nEnter the following information and select Add to configure your TAXII server."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligenceTaxii""}]}]","{""customs"": [{""name"": ""TAXII Server"", ""description"": ""TAXII 2.0 or TAXII 2.1 Server URI and Collection ID.""}], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceTaxii.json","true" +"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxii","Microsoft","Threat intelligence - TAXII","Microsoft Sentinel integrates with TAXII 2.0 and 2.1 data sources to enable monitoring, alerting, and hunting using your threat intelligence. 
Use this connector to send the supported STIX object types from TAXII servers to Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes. For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2224105&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Configure TAXII servers to stream STIX 2.0 or 2.1 STIX objects to Microsoft Sentinel"", ""description"": ""You can connect your TAXII servers to Microsoft Sentinel using the built-in TAXII connector. For detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence---taxii-data-connector). \n\nEnter the following information and select Add to configure your TAXII server."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligenceTaxii""}]}]","{""customs"": [{""name"": ""TAXII Server"", ""description"": ""TAXII 2.0 or TAXII 2.1 Server URI and Collection ID.""}], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceTaxii.json","true" +"ThreatIntelExportOperation","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxiiExport","Microsoft","Threat intelligence - TAXII Export (Preview)","Microsoft Sentinel integrates 
with TAXII 2.1 servers to enable exporting of your threat intelligence objects. Use this connector to send the supported STIX object types from Microsoft Sentinel to TAXII servers.","[{""title"": ""Configure TAXII servers to export STIX 2.1 objects to. Once configured, you can start exporting STIX objects from your TI repository"", ""instructions"": [{""parameters"": {""isExport"": true}, ""type"": ""ThreatIntelligenceTaxii""}]}]","{""customs"": [{""name"": ""TAXII Server"", ""description"": ""TAXII 2.1 Server URL and Collection ID.""}], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceTaxiiExport.json","true" +"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. \n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: \nFairfax: https://management.usgovcloudapi.net/.default \nMooncake: https://management.chinacloudapi.cn/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: \nFairfax: https://api.ti.sentinel.azure.us/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \nMooncake: https://api.ti.sentinel.azure.cn/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json","true" +"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. 
\n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: \nFairfax: https://management.usgovcloudapi.net/.default \nMooncake: https://management.chinacloudapi.cn/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: \nFairfax: https://api.ti.sentinel.azure.us/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \nMooncake: https://api.ti.sentinel.azure.cn/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json","true" +"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. 
\n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: https://management.azure.com/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: https://api.ti.sentinel.azure.com/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators.json","true" +"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. 
\n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: https://management.azure.com/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: https://api.ti.sentinel.azure.com/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators.json","true" +"","Threat Intelligence Solution for Azure Government","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20Solution%20for%20Azure%20Government","azuresentinel","azure-sentinel-solution-threatintelligenceazuregov","2023-03-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","ThreatAnalysis&Response","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ThreatAnalysis%26Response","azuresentinel","azure-sentinel-solution-mitreattck","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","ThreatConnect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ThreatConnect","threatconnectinc1694630408738","sentinel-threatconnect-byol-enterprise","2023-09-11","2023-09-11","","ThreatConnect, Inc.","Partner","https://threatconnect.com/contact/","","domains","","","","","","","","false" +"","ThreatXCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ThreatXCloud","azuresentinel","azure-sentinel-solution-threatxwaf","2022-09-23","2022-09-23","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Tomcat_CL","Tomcat","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tomcat","azuresentinel","azure-sentinel-solution-apachetomcat","2022-01-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ApacheTomcat","Apache","[Deprecated] Apache Tomcat","The Apache Tomcat solution provides the capability to ingest [Apache Tomcat](http://tomcat.apache.org/) events into Microsoft Sentinel. Refer to [Apache Tomcat documentation](http://tomcat.apache.org/tomcat-10.0-doc/logging.html) for more information.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias TomcatEvent and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tomcat/Parsers/TomcatEvent.txt).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Apache Tomcat version 10.0.4"", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Apache Tomcat Server where the logs are generated.\n\n> Logs from Apache Tomcat Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. 
Select the link above to open your workspace advanced settings \n2. From the left pane, select **Data**, select **Custom Logs** and click **Add+**\n3. Click **Browse** to upload a sample of a Tomcat log file (e.g. access.log or error.log). Then, click **Next >**\n4. Select **New line** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to Tomcat logs based on your configuration. Example: \n - **Linux** Directory: '/var/log/tomcat/*.log' \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **Tomcat_CL** as the custom log Name and click **Done**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tomcat/Data%20Connectors/Connector_Tomcat_agent.json","true" +"","Torq","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Torq","torqtechnologiesltd2020","torq_sentinel_solution","2024-12-24","","","Torq Support Team","Partner","https://support.torq.io","","domains","","","","","","","","false" +"","Training","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Training","","","","","","","","","","","","","","","","","","false" +"","TransmitSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TransmitSecurity","transmitsecurity","microsoft-sentinel-solution-transmitsecurity","2024-06-10","2024-11-20","","Transmit Security","Partner","https://transmitsecurity.com/support","","domains","","","","","","","","false" +"CommonSecurityLog","Trend Micro Apex One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One","azuresentinel","azure-sentinel-solution-trendmicroapexone","2021-07-06","2022-03-24","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TrendMicroApexOne","Trend Micro","[Deprecated] Trend Micro Apex One via Legacy Agent","The [Trend Micro Apex One](https://www.trendmicro.com/en_us/business/products/user-protection/sps/endpoint.html) data connector provides the capability to ingest [Trend Micro Apex One events](https://aka.ms/sentinel-TrendMicroApex-OneEvents) into Microsoft Sentinel. 
Refer to [Trend Micro Apex Central](https://aka.ms/sentinel-TrendMicroApex-OneCentral) for more information.","[{""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected [**TMApexOneEvent**](https://aka.ms/sentinel-TMApexOneEvent-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Trend Micro Apex Central 2019"", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://docs.trendmicro.com/en-us/enterprise/trend-micro-apex-central-2019-online-help/detections/logs_001/syslog-forwarding.aspx) to configure Apex Central sending alerts via syslog. While configuring, on step 6, select the log format **CEF**.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One/Data%20Connectors/TrendMicro_ApexOne.json","true" +"CommonSecurityLog","Trend Micro Apex One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One","azuresentinel","azure-sentinel-solution-trendmicroapexone","2021-07-06","2022-03-24","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TrendMicroApexOneAma","Trend Micro","[Deprecated] Trend Micro Apex One via AMA","The [Trend Micro Apex One](https://www.trendmicro.com/en_us/business/products/user-protection/sps/endpoint.html) data connector provides the capability to ingest [Trend Micro Apex One events](https://aka.ms/sentinel-TrendMicroApex-OneEvents) into Microsoft Sentinel. 
Refer to [Trend Micro Apex Central](https://aka.ms/sentinel-TrendMicroApex-OneCentral) for more information.","[{""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected [**TMApexOneEvent**](https://aka.ms/sentinel-TMApexOneEvent-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://docs.trendmicro.com/en-us/enterprise/trend-micro-apex-central-2019-online-help/detections/logs_001/syslog-forwarding.aspx) to configure Apex Central sending alerts via syslog. While configuring, on step 6, select the log format **CEF**."", ""instructions"": []}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One/Data%20Connectors/template_TrendMicro_ApexOneAMA.json","true" +"TrendMicroCAS_CL","Trend Micro Cloud App Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Cloud%20App%20Security","azuresentinel","azuresentinel.trendmicrocas","2021-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TrendMicroCAS","Trend Micro","Trend Micro Cloud App Security","The [Trend Micro Cloud App Security](https://www.trendmicro.com/en_be/business/products/user-protection/sps/email-and-collaboration/cloud-app-security.html) data connector provides the capability to retrieve security event logs of the services that Cloud App Security protects and more events into Microsoft Sentinel through the Log Retrieval API. Refer to API [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/supported-cloud-app-/log-retrieval-api/get-security-logs.aspx) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**TrendMicroCAS**](https://aka.ms/sentinel-TrendMicroCAS-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Trend Micro Log Retrieval API**\n\n Follow the instructions to obtain the credentials.\n\n1. Obtain the **TrendMicroCASToken** using the [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/getting-started-with/generating-an-authen.aspx).\n2. 
Save credentials for using in the data connector.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Trend Micro Cloud App Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Trend Micro Cloud App Security data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TrendMicroCAS-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **TrendMicroCASToken**, **TrendMicroCASServiceURL** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Trend Micro Cloud App Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-TMCASAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TMCASXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTrendMicroCASToken\n\t\tTrendMicroCASServiceURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**TrendMicroCASToken** and **TrendMicroCASServiceURL** are required for making API calls. 
See the [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/getting-started-with/using-cloud-app-secu.aspx) to learn more about API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Cloud%20App%20Security/Data%20Connectors/TerndMicroCAS_API_FunctionApp.json","true" +"CommonSecurityLog","Trend Micro Deep Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Deep%20Security","trendmicro","trend_micro_deep_security_mss","2022-05-10","","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicro","Trend Micro","[Deprecated] Trend Micro Deep Security via Legacy","The Trend Micro Deep Security connector allows you to easily connect your Deep Security logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. 
Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Trend Micro Deep Security logs to Syslog agent"", ""description"": ""1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address.\n2. Forward Trend Micro Deep Security events to the Syslog agent.\n3. Define a new Syslog Configuration that uses the CEF format by referencing [this knowledge article](https://aka.ms/Sentinel-trendmicro-kblink) for additional information.\n4. Configure the Deep Security Manager to use this new configuration to forward events to the Syslog agent using [these instructions](https://aka.ms/Sentinel-trendMicro-connectorInstructions).\n5. Make sure to save the [TrendMicroDeepSecurity](https://aka.ms/TrendMicroDeepSecurityFunction) function so that it queries the Trend Micro Deep Security data properly.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Deep%20Security/Data%20Connectors/TrendMicroDeepSecurity.json","true" +"CommonSecurityLog","Trend Micro TippingPoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20TippingPoint","trendmicro","trend_micro_tippingpoint_mss","2022-05-02","","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/contactus?language=en_US","","domains","TrendMicroTippingPoint","Trend Micro","[Deprecated] Trend Micro TippingPoint via Legacy","The Trend Micro TippingPoint connector allows you to easily connect your TippingPoint SMS IPS events with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.","[{""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias TrendMicroTippingPoint and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20TippingPoint/Parsers/TrendMicroTippingPoint).The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Trend Micro TippingPoint SMS logs to Syslog agent"", ""description"": ""Set your TippingPoint SMS to send Syslog messages in ArcSight CEF Format v4.2 format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20TippingPoint/Data%20Connectors/TrendMicroTippingPoint.json","true" +"TrendMicro_XDR_OAT_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Trend Vision One API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Trend Vision One API**\n\n [Follow these instructions](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps) to create an account and an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Trend Vision One connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Trend Vision One API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""This method provides an automated deployment of the Trend Vision One connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-trendmicroxdr-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter a unique **Function Name**, **Workspace ID**, **Workspace Key**, **API Token** and **Region Code**. \n - Note: Provide the appropriate region code based on where your Trend Vision One instance is deployed: us, eu, au, in, sg, jp \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Trend Vision One API Token"", ""description"": ""A Trend Vision One API Token is required. 
See the documentation to learn more about the [Trend Vision One API](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" +"TrendMicro_XDR_RCA_Result_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Trend Vision One API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Trend Vision One API**\n\n [Follow these instructions](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps) to create an account and an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Trend Vision One connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Trend Vision One API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""This method provides an automated deployment of the Trend Vision One connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-trendmicroxdr-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter a unique **Function Name**, **Workspace ID**, **Workspace Key**, **API Token** and **Region Code**. \n - Note: Provide the appropriate region code based on where your Trend Vision One instance is deployed: us, eu, au, in, sg, jp \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Trend Vision One API Token"", ""description"": ""A Trend Vision One API Token is required. 
See the documentation to learn more about the [Trend Vision One API](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" +"TrendMicro_XDR_RCA_Task_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Trend Vision One API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Trend Vision One API**\n\n [Follow these instructions](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps) to create an account and an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Trend Vision One connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Trend Vision One API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""This method provides an automated deployment of the Trend Vision One connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-trendmicroxdr-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter a unique **Function Name**, **Workspace ID**, **Workspace Key**, **API Token** and **Region Code**. \n - Note: Provide the appropriate region code based on where your Trend Vision One instance is deployed: us, eu, au, in, sg, jp \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Trend Vision One API Token"", ""description"": ""A Trend Vision One API Token is required. 
See the documentation to learn more about the [Trend Vision One API](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" +"TrendMicro_XDR_WORKBENCH_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Trend Vision One API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Trend Vision One API**\n\n [Follow these instructions](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps) to create an account and an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Trend Vision One connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Trend Vision One API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""This method provides an automated deployment of the Trend Vision One connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-trendmicroxdr-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter a unique **Function Name**, **Workspace ID**, **Workspace Key**, **API Token** and **Region Code**. \n - Note: Provide the appropriate region code based on where your Trend Vision One instance is deployed: us, eu, au, in, sg, jp \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Trend Vision One API Token"", ""description"": ""A Trend Vision One API Token is required. 
See the documentation to learn more about the [Trend Vision One API](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" +"","UEBA Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/UEBA%20Essentials","azuresentinel","azure-sentinel-solution-uebaessentials","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","URLhaus","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/URLhaus","azuresentinel","azure-sentinel-solution-urlhaus","2022-09-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Ubiquiti_CL","Ubiquiti UniFi","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ubiquiti%20UniFi","azuresentinel","azure-sentinel-solution-ubiquitiunifi","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","UbiquitiUnifi","Ubiquiti","[Deprecated] Ubiquiti UniFi","The [Ubiquiti UniFi](https://www.ui.com/) data connector provides the capability to ingest [Ubiquiti UniFi firewall, dns, ssh, AP events](https://help.ui.com/hc/en-us/articles/204959834-UniFi-How-to-View-Log-Files) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**UbiquitiAuditEvent**](https://aka.ms/sentinel-UbiquitiUnifi-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Enterprise System Controller Release Version: 5.6.2 (Syslog)"", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server to which the Ubiquiti logs are forwarder from Ubiquiti device (e.g.remote syslog server)\n\n> Logs from Ubiquiti Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Ubiquiti logs into Microsoft Sentinel. 
Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\n1. Configure log forwarding on your Ubiquiti controller: \n\n\t i. Go to Settings > System Setting > Controller Configuration > Remote Logging and enable the Syslog and Debugging (optional) logs (Refer to [User Guide](https://dl.ui.com/guides/UniFi/UniFi_Controller_V5_UG.pdf) for detailed instructions).\n2. Download config file [Ubiquiti.conf](https://aka.ms/sentinel-UbiquitiUnifi-conf).\n3. Login to the server where you have installed Azure Log Analytics agent.\n4. Copy Ubiquiti.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder.\n5. Edit Ubiquiti.conf as follows:\n\n\t i. specify port which you have set your Ubiquiti device to forward logs to (line 4)\n\n\t ii. replace **workspace_id** with real value of your Workspace ID (lines 14,15,16,19)\n5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ubiquiti%20UniFi/Data%20Connectors/Connector_Ubiquiti_agent.json","true" +"ThreatIntelligenceIndicator","VMRay","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMRay","vmraygmbh1623334327435","microsoft-sentinel-solution-vmray","2025-07-23","","","VMRay","Partner","https://www.vmray.com/contact/customer-support/","","domains","VMRay","VMRay","VMRayThreatIntelligence","VMRayThreatIntelligence connector automatically generates and feeds threat intelligence for all submissions to VMRay, improving threat detection and incident response in Sentinel. This seamless integration empowers teams to proactively address emerging threats.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the VMRay API to pull VMRay Threat IOCs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy VMRay Threat Intelligence Connector"", ""description"": ""1. Ensure you have all the required prerequisites: **Client ID**, **Tenant ID**, **Client Secret**, **VMRay API Key**, and **VMRay Base URL**.\n2. To obtain the Client ID, Client Secret, and Tenant ID, [follow these instructions](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/VMRay#vmray-configurations)\n3. For the **Flex Consumption Plan**, click the **Deploy to Azure** button below:\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VMRay-azuredeployflex)\n\n4. For the **Premium Plan**, click the **Deploy to Azure** button below:\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VMRay-azuredeploypremium).""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**VMRay API Key** is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMRay/Data%20Connectors/VMRayThreatIntelligence_FunctionApp.json","true" +"Syslog","VMWareESXi","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMWareESXi","azuresentinel","azure-sentinel-solution-vmwareesxi","2022-01-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","VMwareESXi","VMWare","[Deprecated] VMware ESXi","The [VMware ESXi](https://www.vmware.com/products/esxi-and-esx.html) connector allows you to easily connect your VMWare ESXi logs with Microsoft Sentinel This gives you more insight into your organization's ESXi servers and improves your security operation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias VMwareESXi and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMWareESXi/Parsers/VMwareESXi.yaml), on the second line of the query, enter the hostname(s) of your VMwareESXi device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure and connect the VMware ESXi"", ""description"": ""1. Follow these instructions to configure the VMWare ESXi to forward syslog: \n - [VMware ESXi 3.5 and 4.x](https://kb.vmware.com/s/article/1016621) \n - [VMware ESXi 5.0+](https://docs.vmware.com/en/VMware-vSphere/5.5/com.vmware.vsphere.monitoring.doc/GUID-9F67DB52-F469-451F-B6C8-DAE8D95976E7.html)\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""VMwareESXi"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMWareESXi/Data%20Connectors/Connector_Syslog_VMwareESXi.json","true" +"CarbonBlackAuditLogs_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","VMwareCarbonBlack","VMware","VMware Carbon Black Cloud","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) connector provides the capability to ingest Carbon Black data into Microsoft Sentinel. The connector provides visibility into Audit, Notification and Event logs in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to VMware Carbon Black to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VMware Carbon Black API**\n\n [Follow these instructions](https://developer.carbonblack.com/reference/carbon-black-cloud/authentication/#creating-an-api-key) to create an API Key.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the VMware Carbon Black connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the VMware Carbon Black API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the VMware Carbon Black connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelcarbonblackazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelcarbonblackazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **Log Types**, **API ID(s)**, **API Key(s)**, **Carbon Black Org Key**, **S3 Bucket Name**, **AWS Access Key Id**, **AWS Secret Access Key**, **EventPrefixFolderName**,**AlertPrefixFolderName**, and validate the **URI**.\n> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346)\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. Enter the SIEM API ID/Key values or leave blank, if not required. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware Carbon Black connector manually with Azure Functions.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. 
Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Select **Timer Trigger**.\n3. Enter a unique Function **Name** and modify the cron schedule, if needed. The default value is set to run the Function App every 5 minutes. (Note: the Timer trigger should match the `timeInterval` value below to prevent overlapping data), click **Create**.\n4. Click on **Code + Test** on the left pane. \n5. Copy the [Function App Code](https://aka.ms/sentinelcarbonblackazurefunctioncode) and paste into the Function App `run.ps1` editor.\n5. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following thirteen to sixteen (13-16) application settings individually, with their respective string values (case-sensitive): \n\t\tapiId\n\t\tapiKey\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\ttimeInterval\n\t\tCarbonBlackOrgKey\n\t\tCarbonBlackLogTypes \n\t\ts3BucketName \n\t\tEventPrefixFolderName \n\t\tAlertPrefixFolderName \n\t\tAWSAccessKeyId \n\t\tAWSSecretAccessKey \n\t\tSIEMapiId (Optional)\n\t\tSIEMapiKey (Optional)\n\t\tlogAnalyticsUri (optional) \n> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346). 
The `uri` value must follow the following schema: `https://.conferdeploy.net` - There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.\n> - Set the `timeInterval` (in minutes) to the default value of `5` to correspond to the default Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. Enter the `SIEMapiId` and `SIEMapiKey` values, if needed, or omit, if not required. \n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""VMware Carbon Black API Key(s)"", ""description"": ""Carbon Black API and/or SIEM Level API Key(s) are required. See the documentation to learn more about the [Carbon Black API](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/).\n - A Carbon Black **API** access level API ID and Key is required for [Audit](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#audit-log-events) and [Event](https://developer.carbonblack.com/reference/carbon-black-cloud/platform/latest/data-forwarder-config-api/) logs. 
\n - A Carbon Black **SIEM** access level API ID and Key is required for [Notification](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#notifications) alerts.""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name**, **Folder Name in AWS S3 Bucket** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlack_API_FunctionApp.json","true" +"CarbonBlackEvents_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","VMwareCarbonBlack","VMware","VMware Carbon Black Cloud","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) connector provides the capability to ingest Carbon Black data into Microsoft Sentinel. The connector provides visibility into Audit, Notification and Event logs in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to VMware Carbon Black to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VMware Carbon Black API**\n\n [Follow these instructions](https://developer.carbonblack.com/reference/carbon-black-cloud/authentication/#creating-an-api-key) to create an API Key.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the VMware Carbon Black connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the VMware Carbon Black API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the VMware Carbon Black connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelcarbonblackazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelcarbonblackazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Log Types**, **API ID(s)**, **API Key(s)**, **Carbon Black Org Key**, **S3 Bucket Name**, **AWS Access Key Id**, **AWS Secret Access Key**, **EventPrefixFolderName**,**AlertPrefixFolderName**, and validate the **URI**.\n> - Enter the URI that corresponds to your region. 
The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346)\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. Enter the SIEM API ID/Key values or leave blank, if not required. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware Carbon Black connector manually with Azure Functions.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Select **Timer Trigger**.\n3. 
Enter a unique Function **Name** and modify the cron schedule, if needed. The default value is set to run the Function App every 5 minutes. (Note: the Timer trigger should match the `timeInterval` value below to prevent overlapping data), click **Create**.\n4. Click on **Code + Test** on the left pane. \n5. Copy the [Function App Code](https://aka.ms/sentinelcarbonblackazurefunctioncode) and paste into the Function App `run.ps1` editor.\n5. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following thirteen to sixteen (13-16) application settings individually, with their respective string values (case-sensitive): \n\t\tapiId\n\t\tapiKey\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\ttimeInterval\n\t\tCarbonBlackOrgKey\n\t\tCarbonBlackLogTypes \n\t\ts3BucketName \n\t\tEventPrefixFolderName \n\t\tAlertPrefixFolderName \n\t\tAWSAccessKeyId \n\t\tAWSSecretAccessKey \n\t\tSIEMapiId (Optional)\n\t\tSIEMapiKey (Optional)\n\t\tlogAnalyticsUri (optional) \n> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346). The `uri` value must follow the following schema: `https://.conferdeploy.net` - There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.\n> - Set the `timeInterval` (in minutes) to the default value of `5` to correspond to the default Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. 
Enter the `SIEMapiId` and `SIEMapiKey` values, if needed, or omit, if not required. \n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""VMware Carbon Black API Key(s)"", ""description"": ""Carbon Black API and/or SIEM Level API Key(s) are required. 
See the documentation to learn more about the [Carbon Black API](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/).\n - A Carbon Black **API** access level API ID and Key is required for [Audit](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#audit-log-events) and [Event](https://developer.carbonblack.com/reference/carbon-black-cloud/platform/latest/data-forwarder-config-api/) logs. \n - A Carbon Black **SIEM** access level API ID and Key is required for [Notification](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#notifications) alerts.""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name**, **Folder Name in AWS S3 Bucket** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlack_API_FunctionApp.json","true" +"CarbonBlackNotifications_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","VMwareCarbonBlack","VMware","VMware Carbon Black Cloud","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) connector provides the capability to ingest Carbon Black data into Microsoft Sentinel. The connector provides visibility into Audit, Notification and Event logs in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to VMware Carbon Black to pull its logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VMware Carbon Black API**\n\n [Follow these instructions](https://developer.carbonblack.com/reference/carbon-black-cloud/authentication/#creating-an-api-key) to create an API Key.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the VMware Carbon Black connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the VMware Carbon Black API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the VMware Carbon Black connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelcarbonblackazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelcarbonblackazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. 
\n3. Enter the **Workspace ID**, **Workspace Key**, **Log Types**, **API ID(s)**, **API Key(s)**, **Carbon Black Org Key**, **S3 Bucket Name**, **AWS Access Key Id**, **AWS Secret Access Key**, **EventPrefixFolderName**,**AlertPrefixFolderName**, and validate the **URI**.\n> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346)\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. Enter the SIEM API ID/Key values or leave blank, if not required. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware Carbon Black connector manually with Azure Functions.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. 
In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Select **Timer Trigger**.\n3. Enter a unique Function **Name** and modify the cron schedule, if needed. The default value is set to run the Function App every 5 minutes. (Note: the Timer trigger should match the `timeInterval` value below to prevent overlapping data), click **Create**.\n4. Click on **Code + Test** on the left pane. \n5. Copy the [Function App Code](https://aka.ms/sentinelcarbonblackazurefunctioncode) and paste into the Function App `run.ps1` editor.\n5. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following thirteen to sixteen (13-16) application settings individually, with their respective string values (case-sensitive): \n\t\tapiId\n\t\tapiKey\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\ttimeInterval\n\t\tCarbonBlackOrgKey\n\t\tCarbonBlackLogTypes \n\t\ts3BucketName \n\t\tEventPrefixFolderName \n\t\tAlertPrefixFolderName \n\t\tAWSAccessKeyId \n\t\tAWSSecretAccessKey \n\t\tSIEMapiId (Optional)\n\t\tSIEMapiKey (Optional)\n\t\tlogAnalyticsUri (optional) \n> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346). 
The `uri` value must follow the following schema: `https://.conferdeploy.net` - There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.\n> - Set the `timeInterval` (in minutes) to the default value of `5` to correspond to the default Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. Enter the `SIEMapiId` and `SIEMapiKey` values, if needed, or omit, if not required. \n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""VMware Carbon Black API Key(s)"", ""description"": ""Carbon Black API and/or SIEM Level API Key(s) are required. See the documentation to learn more about the [Carbon Black API](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/).\n - A Carbon Black **API** access level API ID and Key is required for [Audit](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#audit-log-events) and [Event](https://developer.carbonblack.com/reference/carbon-black-cloud/platform/latest/data-forwarder-config-api/) logs. 
\n - A Carbon Black **SIEM** access level API ID and Key is required for [Notification](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#notifications) alerts.""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name**, **Folder Name in AWS S3 Bucket** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlack_API_FunctionApp.json","true" +"ASimAuthenticationEventLogs","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. 
Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack create multiple queues, this prefix will be added to each one of them. \n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Carbon Black data forwarder configuration \n After all AWS resources has been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have the a Carbon black account and required permissions to create a Data Forwarded to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" +"ASimFileEventLogs","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack create multiple queues, this prefix will be added to each one of them. 
\n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Carbon Black data forwarder configuration \n After all AWS resources has been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have the a Carbon black account and required permissions to create a Data Forwarded to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" +"ASimNetworkSessionLogs","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack create multiple queues, this prefix will be added to each one of them. 
\n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Carbon Black data forwarder configuration \n After all AWS resources has been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have the a Carbon black account and required permissions to create a Data Forwarded to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" +"ASimProcessEventLogs","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack create multiple queues, this prefix will be added to each one of them. 
\n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Carbon Black data forwarder configuration \n After all AWS resources has been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have the a Carbon black account and required permissions to create a Data Forwarded to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" +"ASimRegistryEventLogs","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack create multiple queues, this prefix will be added to each one of them. 
\n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Carbon Black data forwarder configuration \n After all AWS resources has been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have the a Carbon black account and required permissions to create a Data Forwarded to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" +"CarbonBlack_Alerts_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack create multiple queues, this prefix will be added to each one of them. 
\n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Carbon Black data forwarder configuration \n After all AWS resources has been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have the a Carbon black account and required permissions to create a Data Forwarded to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" +"CarbonBlack_Watchlist_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack create multiple queues, this prefix will be added to each one of them. 
\n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Carbon Black data forwarder configuration \n After all AWS resources has been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have the a Carbon black account and required permissions to create a Data Forwarded to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" +"VMware_CWS_DLPLogs_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. 
If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the VMware Edge Cloud Orchestrator REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VECO API**\n\n [Follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) to create and obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function.**""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the VMware SD-WAN and SASE Connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelvmwaresdwan)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**.\n3. Enter or modify the Function App, Log Analytics and Azure Monitor settings, enter the VECO FQDN (without https://, for example vco123-usvi1.velocloud.net), enter the API token created (including \""Token \"" at the beginning of the string), and adjust your desired Function App freaquency, then deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware SD-WAN and SASE Connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-vmwaresdwan-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. vmwsase-siemXXXXXXXXXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab .\n3. Check if the application has these settings defined correctly and adjust if needed: \n\t\tapi_veco_authorization\n\t\tapi_veco_fqdn\n\t\tapp_frequency_mins\n\t\tazsa_share_connectionstring\n\t\tazsa_share_name dce_endpoint\n\t\tdcr_cwsdlplog_immutableid\n\t\tdcr_cwshealth_immutableid\n\t\tdcr_cwsweblog_immutableid\n\t\tdcr_efsfwlog_immutableid\n\t\tdcr_efshealth_immutableid\n\t\tdcr_saseaudit_immutableid\n\t\tstream_cwsdlplog\n\t\tstream_cwshealth\n\t\tstream_cwsweblog\n\t\tstream_efsfwlog\n\t\tstream_efshealth\n\t\tstream_saseaudit\n3. In case you made changes to application settings have been entered, make sure that you click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**api_veco_authorization**, **api_veco_fqdn** is required for REST API. [See the documentation to learn more about VMware SASE APIs](https://developer.vmware.com/apis/vmware-sase-platform/). Check all [requirements and follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) for obtaining credentials. The Function App only supports token-based API authentication. Be advised that the API Token generated will inherit the access rights of the user account under which it was generated.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" +"VMware_CWS_Health_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. 
In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the VMware Edge Cloud Orchestrator REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VECO API**\n\n [Follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) to create and obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function.**""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the VMware SD-WAN and SASE Connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelvmwaresdwan)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**.\n3. Enter or modify the Function App, Log Analytics and Azure Monitor settings, enter the VECO FQDN (without https://, for example vco123-usvi1.velocloud.net), enter the API token created (including \""Token \"" at the beginning of the string), and adjust your desired Function App frequency, then deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware SD-WAN and SASE Connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-vmwaresdwan-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. vmwsase-siemXXXXXXXXXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab .\n3. Check if the application has these settings defined correctly and adjust if needed: \n\t\tapi_veco_authorization\n\t\tapi_veco_fqdn\n\t\tapp_frequency_mins\n\t\tazsa_share_connectionstring\n\t\tazsa_share_name dce_endpoint\n\t\tdcr_cwsdlplog_immutableid\n\t\tdcr_cwshealth_immutableid\n\t\tdcr_cwsweblog_immutableid\n\t\tdcr_efsfwlog_immutableid\n\t\tdcr_efshealth_immutableid\n\t\tdcr_saseaudit_immutableid\n\t\tstream_cwsdlplog\n\t\tstream_cwshealth\n\t\tstream_cwsweblog\n\t\tstream_efsfwlog\n\t\tstream_efshealth\n\t\tstream_saseaudit\n3. In case you made changes to application settings have been entered, make sure that you click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**api_veco_authorization**, **api_veco_fqdn** is required for REST API. [See the documentation to learn more about VMware SASE APIs](https://developer.vmware.com/apis/vmware-sase-platform/). Check all [requirements and follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) for obtaining credentials. The Function App only supports token-based API authentication. Be advised that the API Token generated will inherit the access rights of the user account under which it was generated.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" +"VMware_CWS_Weblogs_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. 
In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the VMware Edge Cloud Orchestrator REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VECO API**\n\n [Follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) to create and obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function.**""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the VMware SD-WAN and SASE Connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelvmwaresdwan)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**.\n3. Enter or modify the Function App, Log Analytics and Azure Monitor settings, enter the VECO FQDN (without https://, for example vco123-usvi1.velocloud.net), enter the API token created (including \""Token \"" at the beginning of the string), and adjust your desired Function App frequency, then deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware SD-WAN and SASE Connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-vmwaresdwan-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. vmwsase-siemXXXXXXXXXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab .\n3. Check if the application has these settings defined correctly and adjust if needed: \n\t\tapi_veco_authorization\n\t\tapi_veco_fqdn\n\t\tapp_frequency_mins\n\t\tazsa_share_connectionstring\n\t\tazsa_share_name dce_endpoint\n\t\tdcr_cwsdlplog_immutableid\n\t\tdcr_cwshealth_immutableid\n\t\tdcr_cwsweblog_immutableid\n\t\tdcr_efsfwlog_immutableid\n\t\tdcr_efshealth_immutableid\n\t\tdcr_saseaudit_immutableid\n\t\tstream_cwsdlplog\n\t\tstream_cwshealth\n\t\tstream_cwsweblog\n\t\tstream_efsfwlog\n\t\tstream_efshealth\n\t\tstream_saseaudit\n3. In case you made changes to application settings have been entered, make sure that you click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**api_veco_authorization**, **api_veco_fqdn** is required for REST API. [See the documentation to learn more about VMware SASE APIs](https://developer.vmware.com/apis/vmware-sase-platform/). Check all [requirements and follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) for obtaining credentials. The Function App only supports token-based API authentication. Be advised that the API Token generated will inherit the access rights of the user account under which it was generated.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" +"VMware_VECO_EventLogs_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. 
In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the VMware Edge Cloud Orchestrator REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VECO API**\n\n [Follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) to create and obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function.**""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the VMware SD-WAN and SASE Connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelvmwaresdwan)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**.\n3. Enter or modify the Function App, Log Analytics and Azure Monitor settings, enter the VECO FQDN (without https://, for example vco123-usvi1.velocloud.net), enter the API token created (including \""Token \"" at the beginning of the string), and adjust your desired Function App frequency, then deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware SD-WAN and SASE Connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-vmwaresdwan-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. vmwsase-siemXXXXXXXXXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab .\n3. Check if the application has these settings defined correctly and adjust if needed: \n\t\tapi_veco_authorization\n\t\tapi_veco_fqdn\n\t\tapp_frequency_mins\n\t\tazsa_share_connectionstring\n\t\tazsa_share_name dce_endpoint\n\t\tdcr_cwsdlplog_immutableid\n\t\tdcr_cwshealth_immutableid\n\t\tdcr_cwsweblog_immutableid\n\t\tdcr_efsfwlog_immutableid\n\t\tdcr_efshealth_immutableid\n\t\tdcr_saseaudit_immutableid\n\t\tstream_cwsdlplog\n\t\tstream_cwshealth\n\t\tstream_cwsweblog\n\t\tstream_efsfwlog\n\t\tstream_efshealth\n\t\tstream_saseaudit\n3. In case you made changes to application settings have been entered, make sure that you click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**api_veco_authorization**, **api_veco_fqdn** is required for REST API. [See the documentation to learn more about VMware SASE APIs](https://developer.vmware.com/apis/vmware-sase-platform/). Check all [requirements and follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) for obtaining credentials. The Function App only supports token-based API authentication. Be advised that the API Token generated will inherit the access rights of the user account under which it was generated.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" +"vcenter_CL","VMware vCenter","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20vCenter","azuresentinel","azure-sentinel-solution-vcenter","2022-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","VMwarevCenter","VMware","[Deprecated] VMware vCenter","The [vCenter](https://www.vmware.com/in/products/vcenter-server.html) connector allows you to easily connect your vCenter server logs with Microsoft Sentinel. This gives you more insight into your organization's data centers and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias VMware vCenter and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20vCenter/Parsers/vCenter.txt), on the second line of the query, enter the hostname(s) of your VMware vCenter device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. \n> 1. If you have not installed the vCenter solution from ContentHub then [Follow the steps](https://aka.ms/sentinel-vCenter-parser) to use the Kusto function alias, **vCenter**"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get vCenter server logs into Microsoft Sentinel. 
Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\n For vCenter Server logs, we have issues while parsing the data by the OMS agent using default settings. \nSo we advise capturing the logs into custom table **vcenter_CL** using below instructions. \n1. Login to the server where you have installed OMS agent.\n2. Download config file vCenter.conf \n\t\twget -v https://aka.ms/sentinel-vcenteroms-conf -O vcenter.conf \n3. Copy vcenter.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. \n\t\tcp vcenter.conf /etc/opt/microsoft/omsagent/<>/conf/omsagent.d/\n4. Edit vcenter.conf as follows:\n\n\t a. vcenter.conf uses the port **22033** by default. Ensure this port is not being used by any other source on your server\n\n\t b. If you would like to change the default port for **vcenter.conf** make sure that you don't use default Azure monitoring /log analytic agent ports I.e.(For example CEF uses TCP port **25226** or **25224**) \n\n\t c. replace **workspace_id** with real value of your Workspace ID (lines 13,14,15,18)\n5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart\n6. Modify /etc/rsyslog.conf file - add below template preferably at the beginning / before directives section \n\n\t\t$template vcenter,\""%timestamp% %hostname% %msg%\\ n\"" \n\n **Note - There is no space between slash(\\\\) and character 'n' in above command.**\n\n 7. 
Create a custom conf file in /etc/rsyslog.d/ for example 10-vcenter.conf and add following filter conditions.\n\nDownload config file [10-vCenter.conf](https://aka.ms/sentinel-vcenter-conf)\n\n\t With an added statement you will need to create a filter which will specify the logs coming from the vcenter server to be forwarded to the custom table.\n\n\t reference: [Filter Conditions \u2014 rsyslog 8.18.0.master documentation](https://rsyslog.readthedocs.io/en/latest/configuration/filters.html)\n\n\t Here is an example of filtering that can be defined, this is not complete and will require additional testing for each installation.\n\t\t if $rawmsg contains \""vcenter-server\"" then @@127.0.0.1:22033;vcenter\n\t\t & stop \n\t\t if $rawmsg contains \""vpxd\"" then @@127.0.0.1:22033;vcenter\n\t\t & stop\n\t\t \n8. Restart rsyslog\n\t\t systemctl restart rsyslog"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Configure and connect the vCenter device(s)"", ""description"": ""[Follow these instructions](https://docs.vmware.com/en/VMware-vSphere/7.0/com.vmware.vsphere.monitoring.doc/GUID-9633A961-A5C3-4658-B099-B81E0512DC21.html) to configure the vCenter to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Include custom pre-requisites if the connectivity requires - else delete customs"", ""description"": ""Description for any custom pre-requisite""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20vCenter/Data%20Connectors/Connector_Syslog_vcenter.json","true" +"ValenceAlert_CL","Valence Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Valence%20Security","valencesecurityinc1673598943514","valence_sentinel_solution","2023-11-20","","","Valence Security","Partner","https://www.valencesecurity.com/","","domains","ValenceSecurity","Valence Security","SaaS Security","Connects the Valence SaaS security platform Azure Log Analytics via the REST API interface.","[{""title"": ""Step 1 : Read the detailed documentation"", ""description"": ""The installation process is documented in great detail in [Valence Security's knowledge base](https://support.valencesecurity.com). 
The user should consult this documentation further to understand installation and debug of the integration.""}, {""title"": ""Step 2: Retrieve the workspace access credentials"", ""description"": ""The first installation step is to retrieve both your **Workspace ID** and **Primary Key** from the Microsoft Sentinel platform.\nCopy the values shown below and save them for configuration of the API log forwarder integration."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Step 3: Configure Sentinel integration on the Valence Security Platform"", ""description"": ""As a Valence Security Platform admin, go to the [configuration screen](https://app.valencesecurity.com/settings/configuration), click Connect in the SIEM Integration card, and choose Microsoft Sentinel. Paste the values from the previous step and click Connect. Valence will test the connection so when success is reported, the connection worked.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Valence%20Security/Data%20Connectors/ValenceSecurity.json","true" +"varonisresources_CL","Varonis Purview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Varonis%20Purview","varonis","azure-sentinel-solution-varonispurview","2025-10-27","2025-10-01","","Varonis","Partner","https://www.varonis.com/resources/support","","domains","VaronisPurviewPush","Varonis","Varonis Purview Push Connector","The [Varonis Purview](https://www.varonis.com/) connector provides the capability to sync resources from Varonis to Microsoft Purview.","[{""title"": ""1. Run this to set up ingestion for Varonis Resources"", ""description"": ""This will create the necessary Log Analytics tables, Data Collection Rule (DCR), and an Entra application to securely send data to the DCR."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Varonis connector resources"", ""applicationDisplayName"": ""Varonis Purview Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. 
Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the Varonis Purview Connector in your Varonis integrations dashboard."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Resources Stream Name"", ""value"": ""Custom-varonisresources""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). 
Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Varonis%20Purview/Data%20Connectors/VaronisPurview_ccp/VaronisPurview_connectorDefinition.json","true" +"VaronisAlerts_CL","VaronisSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VaronisSaaS","varonis","microsoft-sentinel-solution-varonissaas","2023-11-10","2023-11-10","","Varonis","Partner","https://www.varonis.com/resources/support","","domains","VaronisSaaS","Varonis","Varonis SaaS","Varonis SaaS provides the capability to ingest [Varonis Alerts](https://www.varonis.com/products/datalert) into Microsoft Sentinel.

Varonis prioritizes deep data visibility, classification capabilities, and automated remediation for data access. Varonis builds a single prioritized view of risk for your data, so you can proactively and systematically eliminate risk from insider threats and cyberattacks.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Varonis DatAlert service to pull alerts into Microsoft Sentinel. This might result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**For Azure function and related services installation use:**\n\n [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVaronisSaaS%2FData%2520Connectors%2Fazuredeploy.json)""}, {""title"": """", ""description"": ""STEP 1 - Obtain the Varonis DatAlert Endpoint API credentials.\n\n To generate the Client ID and API key:\n 1. Launch the Varonis Web Interface.\n 2. Navigate to Configuration -> API Keys. The API Keys page is displayed.\n 3. Click Create API Key. The Add New API Key settings are displayed on the right.\n 4. Fill in the name and description.\n 5. Click the Generate Key button.\n 6. Copy the API key secret and save it in a handy location. 
You won't be able to copy it again.\n\nFor additional information, please check: [Varonis Documentation](https://help.varonis.com/s/document-item?bundleId=ami1661784208197&topicId=emp1703144742927.html&_LANG=enus)""}, {""title"": """", ""description"": ""STEP 2 - Deploy the connector and the associated Azure Function."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the Deploy to Azure button. \n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVaronisSaaS%2FData%2520Connectors%2Fazuredeploy.json)\n2. Select the preferred Subscription, Resource Group, Region, Storage Account Type.\n3. Enter Log Analytics Workspace Name, Varonis FQDN, Varonis SaaS API Key.\n4. Click Review + Create, Create.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VaronisSaaS/Data%20Connectors/VaronisSaaS_API_FunctionApp.json","true" +"CommonSecurityLog","Vectra AI Detect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect","vectraaiinc","ai_vectra_detect_mss","2022-05-24","2023-04-17","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraDetect","Vectra AI","[Deprecated] Vectra AI Detect via Legacy Agent","The AI Vectra Detect connector allows users to connect Vectra Detect logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives users more insight into their organization's network and improves their security operation capabilities.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 over TCP, UDP or TLS.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward AI Vectra Detect logs to Syslog agent in CEF format"", ""description"": ""Configure Vectra (X Series) Agent to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nFrom the Vectra UI, navigate to Settings > Notifications and Edit Syslog configuration. Follow below instructions to set up the connection:\n\n- Add a new Destination (which is the host where the Microsoft Sentinel Syslog Agent is running)\n\n- Set the Port as **514**\n\n- Set the Protocol as **UDP**\n\n- Set the format to **CEF**\n\n- Set Log types (Select all log types available)\n\n- Click on **Save**\n\nUser can click the **Test** button to force send some test events.\n\n For more information, refer to Cognito Detect Syslog Guide which can be downloaded from the ressource page in Detect UI.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect/Data%20Connectors/AIVectraDetect.json","true" +"CommonSecurityLog","Vectra AI Detect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect","vectraaiinc","ai_vectra_detect_mss","2022-05-24","2023-04-17","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraDetectAma","Vectra AI","[Deprecated] Vectra AI Detect via AMA","The AI Vectra Detect connector allows users to connect Vectra Detect logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives users more insight into their organization's network and improves their security operation capabilities.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. 
Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward AI Vectra Detect logs to Syslog agent in CEF format"", ""description"": ""Configure Vectra (X Series) Agent to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nFrom the Vectra UI, navigate to Settings > Notifications and Edit Syslog configuration. Follow below instructions to set up the connection:\n\n- Add a new Destination (which is the host where the Microsoft Sentinel Syslog Agent is running)\n\n- Set the Port as **514**\n\n- Set the Protocol as **UDP**\n\n- Set the format to **CEF**\n\n- Set Log types (Select all log types available)\n\n- Click on **Save**\n\nUser can click the **Test** button to force send some test events.\n\n For more information, refer to Cognito Detect Syslog Guide which can be downloaded from the ressource page in Detect UI.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect/Data%20Connectors/template_AIVectraDetectAma.json","true" +"VectraStream","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraStream","Vectra AI","AI Vectra Stream via Legacy Agent","The AI Vectra Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected **VectraStream** which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Install the Linux agent on sperate Linux instance.\n\n> Logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Vectra Stream metadata into Microsoft Sentinel. The Log Analytics agent is leveraged to send custom JSON into Azure Monitor, enabling the storage of the metadata into a custom table. For more information, refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json).\n1. Download config file for the log analytics agent: VectraStream.conf (located in the Connector folder within the Vectra solution: https://aka.ms/sentinel-aivectrastream-conf).\n2. Login to the server where you have installed Azure Log Analytics agent.\n3. Copy VectraStream.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder.\n4. Edit VectraStream.conf as follows:\n\n\t i. configure an alternate port to send data to, if desired. Default port is 29009.\n\n\t ii. replace **workspace_id** with real value of your Workspace ID.\n5. 
Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Configure and connect Vectra AI Stream"", ""description"": ""Configure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via the Log Analytics Agent.\n\nFrom the Vectra UI, navigate to Settings > Cognito Stream and Edit the destination configuration:\n\n- Select Publisher: RAW JSON\n\n- Set the server IP or hostname (which is the host which run the Log Analytics Agent)\n\n- Set all the port to **29009** (this port can be modified if required)\n\n- Save\n\n- Set Log types (Select all log types available)\n\n- Click on **Save**\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Brain"", ""description"": ""must be configured to export Stream metadata in JSON""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/Connector_VectraAI_Stream.json","true" +"VectraStream_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraStream","Vectra AI","AI Vectra Stream via Legacy Agent","The AI Vectra Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected **VectraStream** which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Install the Linux agent on sperate Linux instance.\n\n> Logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Vectra Stream metadata into Microsoft Sentinel. The Log Analytics agent is leveraged to send custom JSON into Azure Monitor, enabling the storage of the metadata into a custom table. For more information, refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json).\n1. Download config file for the log analytics agent: VectraStream.conf (located in the Connector folder within the Vectra solution: https://aka.ms/sentinel-aivectrastream-conf).\n2. Login to the server where you have installed Azure Log Analytics agent.\n3. Copy VectraStream.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder.\n4. Edit VectraStream.conf as follows:\n\n\t i. configure an alternate port to send data to, if desired. Default port is 29009.\n\n\t ii. replace **workspace_id** with real value of your Workspace ID.\n5. 
Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Configure and connect Vectra AI Stream"", ""description"": ""Configure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via the Log Analytics Agent.\n\nFrom the Vectra UI, navigate to Settings > Cognito Stream and Edit the destination configuration:\n\n- Select Publisher: RAW JSON\n\n- Set the server IP or hostname (which is the host which run the Log Analytics Agent)\n\n- Set all the port to **29009** (this port can be modified if required)\n\n- Save\n\n- Set Log types (Select all log types available)\n\n- Click on **Save**\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Brain"", ""description"": ""must be configured to export Stream metadata in JSON""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/Connector_VectraAI_Stream.json","true" +"vectra_beacon_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_dcerpc_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been installed yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' button on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check if there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_dhcp_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_dns_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_http_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_isession_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_kerberos_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_ldap_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_ntlm_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_radius_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_rdp_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_smbfiles_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_smbmapping_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_smtp_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_ssh_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_ssl_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_x509_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows you to send Network Metadata collected by Vectra Sensors across the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables required for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytics Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n3. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytics Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been installed yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' button on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check if there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the ports to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"Audits_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note: **Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available..""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" +"Detections_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note: **Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available..""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" +"Entities_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subsciption, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note: **Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available..""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" +"Entity_Scoring_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subsciption, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note: **Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available..""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" +"Health_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subsciption, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note: **Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available..""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" +"Lockdown_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subsciption, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note: **Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available..""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" +"VeeamAuthorizationEvents_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" +"VeeamCovewareFindings_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" +"VeeamMalwareEvents_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" +"VeeamOneTriggeredAlarms_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" +"VeeamSecurityComplianceAnalyzer_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" +"VeeamSessions_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" +"","Veritas NetBackup","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veritas%20NetBackup","veritas","veritas-sentinel","2023-09-25","","","Veritas Technologies LLC","Partner","https://www.veritas.com/content/support/en_US/contact-us","","domains","","","","","","","","false" +"CommonSecurityLog","VirtualMetric DataStream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream","virtualmetric","azure-sentinel-solution-virtualmetric-datastream","2025-09-15","","","VirtualMetric","Partner","https://support.virtualmetric.com","VirtualMetric","domains","VirtualMetricDirectorProxy","VirtualMetric","VirtualMetric Director Proxy","VirtualMetric Director Proxy deploys an Azure Function App to securely bridge VirtualMetric DataStream with Azure services including Microsoft Sentinel, Azure Data Explorer, and Azure Storage.","[{""title"": ""Deploy VirtualMetric Director Proxy"", ""description"": ""Deploy the Azure Function App that serves as a secure proxy between VirtualMetric DataStream and Microsoft Sentinel."", ""instructions"": 
[{""type"": ""InstructionStepsGroup"", ""parameters"": {""enable"": true, ""instructionSteps"": [{""title"": ""Prerequisites and Deployment Order"", ""description"": ""**Recommended Deployment Order:**\n\nFor optimal configuration, consider deploying the target connectors first:\n\n1. **Deploy Microsoft Sentinel Connector**: Deploy the VirtualMetric DataStream for Microsoft Sentinel connector first to create the required Data Collection Endpoints and Rules.\n\n2. **Deploy Microsoft Sentinel data lake Connector** (optional): If using Microsoft Sentinel data lake tables, deploy the VirtualMetric DataStream for Microsoft Sentinel data lake connector.\n\n3. **Deploy Director Proxy** (this step): The Director Proxy can then be configured with your Microsoft Sentinel targets.\n\n**Note:** This order is recommended but not required. You can deploy the Director Proxy independently and configure it with your targets later.""}, {""title"": ""Deploy Azure Function App"", ""description"": ""Deploy the VirtualMetric Director Proxy Azure Function App using the Deploy to Azure button.\n\n1. **Deploy to Azure**:\n - Click the Deploy to Azure button below to deploy the Function App:\n - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVirtualMetric%2520DataStream%2FData%2520Connectors%2FVirtualMetric-DirectorProxy%2FDeployToAzure.json)\n\n2. **Configure Deployment Parameters**:\n - **Subscription**: Select your Azure subscription\n - **Resource Group**: Choose the same resource group as your Microsoft Sentinel workspace or create a new one\n - **Region**: Select the Azure region (should match your Microsoft Sentinel workspace region)\n - **Function App Name**: Provide a unique name for the Function App (e.g., \""vmetric-director-proxy\"")\n\n3. 
**Complete Deployment**:\n - Click **Review + create** to validate the parameters\n - Click **Create** to deploy the Function App\n - Wait for deployment to complete (typically 3-5 minutes)\n - Note the Function App URL: `https://.azurewebsites.net`""}, {""title"": ""Configure Function App Permissions"", ""description"": ""Assign the necessary permissions to the Function App's managed identity to access Microsoft Sentinel resources.\n\n1. **Enable System-Assigned Managed Identity**:\n - Navigate to your deployed Function App in Azure Portal\n - Go to **Identity** under Settings\n - Toggle **Status** to **On** for System assigned identity\n - Click **Save** and confirm\n\n2. **Navigate to Resource Group**:\n - Go to the resource group containing your Microsoft Sentinel workspace and Data Collection Endpoints\n\n3. **Assign Required Roles**:\n - Open **Access control (IAM)**\n - Click **+ Add** > **Add role assignment**\n - Assign the following roles to the Function App's system-assigned managed identity:\n - **Monitoring Metrics Publisher**: For sending data to Data Collection Endpoints\n - **Monitoring Reader**: For reading Data Collection Rules configuration\n\n4. **Select the Function App Identity**:\n - In **Members** tab, select **Managed identity**\n - Choose **Function App** and select your deployed Director Proxy Function App\n - Complete the role assignment\n\n5. **Get Function App Access Token** (Optional for Function Key authentication):\n - Navigate to your Function App\n - Go to **App keys** under Functions\n - Copy the default host key or create a new function key for authentication""}, {""title"": ""Configure VirtualMetric DataStream Integration"", ""description"": ""Set up VirtualMetric DataStream to send security telemetry to Microsoft Sentinel through the Director Proxy.\n\n1. 
**Access VirtualMetric DataStream Configuration**:\n - Log into your **VirtualMetric DataStream** management console\n - Navigate to **Targets** section\n - Click **Microsoft Sentinel Targets**\n - Click **Add new target** or edit an existing Microsoft Sentinel target\n\n2. **Configure General Settings**:\n - **Name**: Enter a name for your target (e.g., \""sentinel-with-proxy\"")\n - **Description**: Optionally provide a description for the target configuration\n\n3. **Configure Azure Authentication**:\n \n **For Service Principal Authentication:**\n - **Managed Identity for Azure**: Keep **Disabled**\n - **Tenant ID**: Enter your Azure Active Directory tenant ID\n - **Client ID**: Enter your service principal application ID\n - **Client Secret**: Enter your service principal client secret\n \n **For Azure Managed Identity:**\n - **Managed Identity for Azure**: Set to **Enabled**\n\n4. **Configure Director Proxy** (in Azure Properties tab):\n - **Endpoint Address**: Enter the Function App URL from Step 2 (format: `https://.azurewebsites.net`)\n - **Access Token**: Enter the Function App host key from Step 3 (optional if using Managed Identity)\n\n5. **Configure Stream Properties**:\n - **Endpoint**: Enter the DCE Logs Ingestion URI (format: `https://..ingest.monitor.azure.com`)\n - **Streams**: Select **Auto** for automatic stream detection, or configure specific streams if needed\n\n6. 
**Verify Data Ingestion in Microsoft Sentinel**:\n - Return to your **Log Analytics Workspace**\n - Run sample queries to confirm data is being received:\n ```kql\n CommonSecurityLog\n | where TimeGenerated > ago(1h)\n | take 10\n ```\n - Check the **Microsoft Sentinel Overview** dashboard for new data sources and event counts""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": false}}], ""customs"": [{""name"": ""Azure Function App"", ""description"": ""An Azure Function App must be deployed to host the Director Proxy. Requires read, write, and delete permissions on Microsoft.Web/sites resources within your resource group to create and manage the Function App.""}, {""name"": ""VirtualMetric DataStream Configuration"", ""description"": ""You need VirtualMetric DataStream configured with authentication credentials to connect to the Director Proxy. 
The Director Proxy acts as a secure bridge between VirtualMetric DataStream and Azure services.""}, {""name"": ""Target Azure Services"", ""description"": ""Configure your target Azure services such as Microsoft Sentinel Data Collection Endpoints, Azure Data Explorer clusters, or Azure Storage accounts where the Director Proxy will forward data.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream/Data%20Connectors/VirtualMetric-DirectorProxy/Template_DirectorProxy.json","true" +"CommonSecurityLog","VirtualMetric DataStream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream","virtualmetric","azure-sentinel-solution-virtualmetric-datastream","2025-09-15","","","VirtualMetric","Partner","https://support.virtualmetric.com","VirtualMetric","domains","VirtualMetricMSSentinelConnector","VirtualMetric","VirtualMetric DataStream for Microsoft Sentinel","VirtualMetric DataStream connector deploys Data Collection Rules to ingest security telemetry into Microsoft Sentinel.","[{""title"": ""Configure VirtualMetric DataStream for Microsoft Sentinel"", ""description"": ""Configure the VirtualMetric DataStream for Microsoft Sentinel to send data."", ""instructions"": [{""type"": ""InstructionStepsGroup"", ""parameters"": {""enable"": true, ""instructionSteps"": [{""title"": ""Register Application in Microsoft Entra ID (Optional)"", ""description"": ""**Choose your authentication method:**\n\n**Option A: Use Azure Managed Identity (Recommended)**\n- Skip this step if you plan to use Azure Managed Identity for authentication.\n- Azure Managed Identity provides a more secure authentication method without managing credentials.\n\n**Option B: Register a Service Principal Application**\n\n1. 
**Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**:\n - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab.\n - Ensure you are logged in with an account that has **Application Administrator** or **Global Administrator** permissions.\n\n2. **Create a New Application**:\n - In the **Microsoft Entra ID portal**, select **App registrations** from the left-hand navigation.\n - Click on **+ New registration**.\n - Fill out the following fields:\n - **Name**: Enter a descriptive name for the app (e.g., \""VirtualMetric ASIM Connector\"").\n - **Supported account types**: Choose **Accounts in this organizational directory only** (Single tenant).\n - **Redirect URI**: Leave this blank.\n - Click **Register** to create the application.\n\n3. **Copy Application and Tenant IDs**:\n - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You'll need these for VirtualMetric DataStream configuration.\n\n4. **Create a Client Secret**:\n - In the **Certificates & secrets** section, click **+ New client secret**.\n - Add a description (e.g., 'VirtualMetric ASIM Secret') and set an appropriate expiration period.\n - Click **Add**.\n - **Copy the client secret value immediately**, as it will not be shown again. Store this securely for VirtualMetric DataStream configuration.""}, {""title"": ""Assign Required Permissions"", ""description"": ""Assign the required roles to your chosen authentication method (Service Principal or Managed Identity) in the resource group.\n\n**For Service Principal (if you completed Step 1):**\n\n1. **Navigate to Your Resource Group**:\n - Open the **Azure Portal** and navigate to the **Resource Group** that contains your **Log Analytics Workspace** and where **Data Collection Rules (DCRs)** will be deployed.\n\n2. 
**Assign the Monitoring Metrics Publisher Role**:\n - In the **Resource Group**, click on **Access control (IAM)** from the left-hand menu.\n - Click **+ Add** and select **Add role assignment**.\n - In the **Role** tab, search for and select **Monitoring Metrics Publisher**.\n - Click **Next** to go to the **Members** tab.\n - Under **Assign access to**, select **User, group, or service principal**.\n - Click **+ Select members** and search for your registered application by name or client ID.\n - Select your application and click **Select**.\n - Click **Review + assign** twice to complete the assignment.\n\n3. **Assign the Monitoring Reader Role**:\n - Repeat the same process to assign the **Monitoring Reader** role:\n - Click **+ Add** and select **Add role assignment**.\n - In the **Role** tab, search for and select **Monitoring Reader**.\n - Follow the same member selection process as above.\n - Click **Review + assign** twice to complete the assignment.\n\n**For Azure Managed Identity:**\n\n1. **Create or Identify Your Managed Identity**:\n - If using **System-assigned Managed Identity**: Enable it on your Azure resource (VM, App Service, etc.).\n - If using **User-assigned Managed Identity**: Create one in your resource group if it doesn't exist.\n\n2. **Assign the Monitoring Metrics Publisher Role**:\n - Follow the same steps as above, but in the **Members** tab:\n - Under **Assign access to**, select **Managed identity**.\n - Click **+ Select members** and choose the appropriate managed identity type and select your identity.\n - Click **Select**, then **Review + assign** twice to complete.\n\n3. 
**Assign the Monitoring Reader Role**:\n - Repeat the process to assign the **Monitoring Reader** role to the same managed identity.\n\n**Required Permission Summary:**\nThe assigned roles provide the following capabilities:\n- **Monitoring Metrics Publisher**: Write data to Data Collection Endpoints (DCE) and send telemetry through Data Collection Rules (DCR)\n- **Monitoring Reader**: Read stream configuration and access Log Analytics workspace for ASIM table ingestion""}, {""title"": ""Deploy Azure Infrastructure"", ""description"": ""Deploy the required Data Collection Endpoint (DCE) and Data Collection Rules (DCR) for Microsoft Sentinel tables using our ARM template.\n\n1. **Deploy to Azure**:\n - Click the Deploy to Azure button below to automatically deploy the required infrastructure:\n - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVirtualMetric%2520DataStream%2FData%2520Connectors%2FVirtualMetric-Sentinel%2FDeployToAzure.json)\n - This will take you directly to the Azure portal to start the deployment.\n\n2. **Configure Deployment Parameters**:\n - On the custom deployment page, configure the following settings:\n \n **Project details:**\n - **Subscription**: Select your Azure subscription from the dropdown\n - **Resource group**: Select an existing resource group or click **Create new** to create a new one\n \n **Instance details:**\n - **Region**: Select the Azure region where your Log Analytics workspace is located (e.g., West Europe)\n - **Workspace**: Enter your Log Analytics workspace name\n - **DCE Name**: Provide a name for the Data Collection Endpoint (e.g., \""vmetric-dce\"")\n - **DCR Name Prefix**: Provide a prefix for the Data Collection Rules (e.g., \""vmetric-dcr\"")\n\n3. 
**Complete the Deployment**:\n - Click **Review + create** to validate the template.\n - Review the parameters and click **Create** to deploy the resources.\n - Wait for the deployment to complete (typically takes 2-5 minutes).\n\n4. **Verify Deployed Resources**:\n - After deployment, verify the following resources were created:\n - **Data Collection Endpoint (DCE)**: Check **Azure Portal > Monitor > Data Collection Endpoints**\n - **Data Collection Rules (DCRs)**: Check **Azure Portal > Monitor > Data Collection Rules**\n - **Copy the DCE Logs Ingestion URI** from the DCE **Overview** page (format: `https://..ingest.monitor.azure.com`)\n - **Copy the DCE Resource ID** from the DCE **Overview** page (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`)\n - For each DCR, note the **Immutable ID** from the **Overview** page - you'll need these for VirtualMetric DataStream configuration.""}, {""title"": ""Configure VirtualMetric DataStream Integration"", ""description"": ""Set up VirtualMetric DataStream to send security telemetry to Microsoft Sentinel tables.\n\n1. **Access VirtualMetric DataStream Configuration**:\n - Log into your **VirtualMetric DataStream** management console.\n - Navigate to **Fleet Management** > **Targets** section.\n - Click **Add new target** button.\n - Select **Microsoft Sentinel** target.\n\n2. **Configure General Settings**:\n - **Name**: Enter a name for your target (e.g., \""cus01-ms-sentinel\"")\n - **Description**: Optionally provide a description for the target configuration\n\n3. 
**Configure Azure Authentication** (choose based on Step 1):\n \n **For Service Principal Authentication:**\n - **Managed Identity for Azure**: Keep **Disabled**\n - **Tenant ID**: Enter the Directory (tenant) ID from Step 1\n - **Client ID**: Enter the Application (client) ID from Step 1\n - **Client Secret**: Enter the client secret value from Step 1\n \n **For Azure Managed Identity:**\n - **Managed Identity for Azure**: Set to **Enabled**\n\n4. **Configure Stream Properties**:\n - **Endpoint**: Choose your configuration method:\n - **For manual stream configuration**: Enter the DCE Logs Ingestion URI (format: `https://..ingest.monitor.azure.com`)\n - **For auto stream detection**: Enter the DCE Resource ID (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`)\n - **Streams**: Select **Auto** for automatic stream detection, or configure specific streams if needed\n\n5. **Verify Data Ingestion in Microsoft Sentinel**:\n - Return to your **Log Analytics Workspace**\n - Run sample queries on the ASIM tables to confirm data is being received:\n ```kql\n ASimNetworkSessionLogs\n | where TimeGenerated > ago(1h)\n | take 10\n ```\n - Check the **Microsoft Sentinel Overview** dashboard for new data sources and event counts.""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": false}}], ""customs"": [{""name"": ""App Registration or Azure Managed Identity"", ""description"": ""VirtualMetric DataStream 
requires an Entra ID identity to authenticate and send logs to Microsoft Sentinel. You can choose between creating an App Registration with Client ID and Client Secret, or using Azure Managed Identity for enhanced security without credential management.""}, {""name"": ""Resource Group Role Assignment"", ""description"": ""The chosen identity (App Registration or Managed Identity) must be assigned to the resource group containing the Data Collection Endpoint with the following roles: Monitoring Metrics Publisher (for log ingestion) and Monitoring Reader (for reading stream configuration).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream/Data%20Connectors/VirtualMetric-Sentinel/Template_Sentinel.json","true" +"CommonSecurityLog","VirtualMetric DataStream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream","virtualmetric","azure-sentinel-solution-virtualmetric-datastream","2025-09-15","","","VirtualMetric","Partner","https://support.virtualmetric.com","VirtualMetric","domains","VirtualMetricMSSentinelDataLakeConnector","VirtualMetric","VirtualMetric DataStream for Microsoft Sentinel data lake","VirtualMetric DataStream connector deploys Data Collection Rules to ingest security telemetry into Microsoft Sentinel data lake.","[{""title"": ""Configure VirtualMetric DataStream for Microsoft Sentinel data lake"", ""description"": ""Configure the VirtualMetric DataStream for Microsoft Sentinel data lake to send data."", ""instructions"": [{""type"": ""InstructionStepsGroup"", ""parameters"": {""enable"": true, ""instructionSteps"": [{""title"": ""Register Application in Microsoft Entra ID (Optional)"", ""description"": ""**Choose your authentication method:**\n\n**Option A: Use Azure Managed Identity (Recommended)**\n- Skip this step if you plan to use Azure Managed Identity for authentication.\n- Azure Managed Identity provides a more secure authentication method without managing 
credentials.\n\n**Option B: Register a Service Principal Application**\n\n1. **Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**:\n - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab.\n - Ensure you are logged in with an account that has **Application Administrator** or **Global Administrator** permissions.\n\n2. **Create a New Application**:\n - In the **Microsoft Entra ID portal**, select **App registrations** from the left-hand navigation.\n - Click on **+ New registration**.\n - Fill out the following fields:\n - **Name**: Enter a descriptive name for the app (e.g., \""VirtualMetric ASIM Connector\"").\n - **Supported account types**: Choose **Accounts in this organizational directory only** (Single tenant).\n - **Redirect URI**: Leave this blank.\n - Click **Register** to create the application.\n\n3. **Copy Application and Tenant IDs**:\n - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You'll need these for VirtualMetric DataStream configuration.\n\n4. **Create a Client Secret**:\n - In the **Certificates & secrets** section, click **+ New client secret**.\n - Add a description (e.g., 'VirtualMetric ASIM Secret') and set an appropriate expiration period.\n - Click **Add**.\n - **Copy the client secret value immediately**, as it will not be shown again. Store this securely for VirtualMetric DataStream configuration.""}, {""title"": ""Assign Required Permissions"", ""description"": ""Assign the required roles to your chosen authentication method (Service Principal or Managed Identity) in the resource group.\n\n**For Service Principal (if you completed Step 1):**\n\n1. **Navigate to Your Resource Group**:\n - Open the **Azure Portal** and navigate to the **Resource Group** that contains your **Log Analytics Workspace** and where **Data Collection Rules (DCRs)** will be deployed.\n\n2. 
**Assign the Monitoring Metrics Publisher Role**:\n - In the **Resource Group**, click on **Access control (IAM)** from the left-hand menu.\n - Click **+ Add** and select **Add role assignment**.\n - In the **Role** tab, search for and select **Monitoring Metrics Publisher**.\n - Click **Next** to go to the **Members** tab.\n - Under **Assign access to**, select **User, group, or service principal**.\n - Click **+ Select members** and search for your registered application by name or client ID.\n - Select your application and click **Select**.\n - Click **Review + assign** twice to complete the assignment.\n\n3. **Assign the Monitoring Reader Role**:\n - Repeat the same process to assign the **Monitoring Reader** role:\n - Click **+ Add** and select **Add role assignment**.\n - In the **Role** tab, search for and select **Monitoring Reader**.\n - Follow the same member selection process as above.\n - Click **Review + assign** twice to complete the assignment.\n\n**For Azure Managed Identity:**\n\n1. **Create or Identify Your Managed Identity**:\n - If using **System-assigned Managed Identity**: Enable it on your Azure resource (VM, App Service, etc.).\n - If using **User-assigned Managed Identity**: Create one in your resource group if it doesn't exist.\n\n2. **Assign the Monitoring Metrics Publisher Role**:\n - Follow the same steps as above, but in the **Members** tab:\n - Under **Assign access to**, select **Managed identity**.\n - Click **+ Select members** and choose the appropriate managed identity type and select your identity.\n - Click **Select**, then **Review + assign** twice to complete.\n\n3. 
**Assign the Monitoring Reader Role**:\n - Repeat the process to assign the **Monitoring Reader** role to the same managed identity.\n\n**Required Permission Summary:**\nThe assigned roles provide the following capabilities:\n- **Monitoring Metrics Publisher**: Write data to Data Collection Endpoints (DCE) and send telemetry through Data Collection Rules (DCR)\n- **Monitoring Reader**: Read stream configuration and access Log Analytics workspace for ASIM table ingestion""}, {""title"": ""Deploy Azure Infrastructure"", ""description"": ""Deploy the required Data Collection Endpoint (DCE) and Data Collection Rules (DCR) for Microsoft Sentinel data lake tables using our ARM template.\n\n1. **Deploy to Azure**:\n - Click the Deploy to Azure button below to automatically deploy the required infrastructure:\n - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVirtualMetric%2520DataStream%2FData%2520Connectors%2FVirtualMetric-SentinelDataLake%2FDeployToAzure.json)\n - This will take you directly to the Azure portal to start the deployment.\n\n2. **Configure Deployment Parameters**:\n - On the custom deployment page, configure the following settings:\n \n **Project details:**\n - **Subscription**: Select your Azure subscription from the dropdown\n - **Resource group**: Select an existing resource group or click **Create new** to create a new one\n \n **Instance details:**\n - **Region**: Select the Azure region where your Log Analytics workspace is located (e.g., West Europe)\n - **Workspace**: Enter your Log Analytics workspace name\n - **DCE Name**: Provide a name for the Data Collection Endpoint (e.g., \""vmetric-dce\"")\n - **DCR Name Prefix**: Provide a prefix for the Data Collection Rules (e.g., \""vmetric-dcr\"")\n\n3. 
**Complete the Deployment**:\n - Click **Review + create** to validate the template.\n - Review the parameters and click **Create** to deploy the resources.\n - Wait for the deployment to complete (typically takes 2-5 minutes).\n\n4. **Verify Deployed Resources**:\n - After deployment, verify the following resources were created:\n - **Data Collection Endpoint (DCE)**: Check **Azure Portal > Monitor > Data Collection Endpoints**\n - **Data Collection Rules (DCRs)**: Check **Azure Portal > Monitor > Data Collection Rules**\n - **Copy the DCE Logs Ingestion URI** from the DCE **Overview** page (format: `https://..ingest.monitor.azure.com`)\n - **Copy the DCE Resource ID** from the DCE **Overview** page (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`)\n - For each DCR, note the **Immutable ID** from the **Overview** page - you'll need these for VirtualMetric DataStream configuration.""}, {""title"": ""Configure VirtualMetric DataStream Integration"", ""description"": ""Set up VirtualMetric DataStream to send security telemetry to Microsoft Sentinel data lake tables.\n\n1. **Access VirtualMetric DataStream Configuration**:\n - Log into your **VirtualMetric DataStream** management console.\n - Navigate to **Fleet Management** > **Targets** section.\n - Click **Add new target** button.\n - Select **Microsoft Sentinel** target.\n\n2. **Configure General Settings**:\n - **Name**: Enter a name for your target (e.g., \""cus01-ms-sentinel\"")\n - **Description**: Optionally provide a description for the target configuration\n\n3. 
**Configure Azure Authentication** (choose based on Step 1):\n \n **For Service Principal Authentication:**\n - **Managed Identity for Azure**: Keep **Disabled**\n - **Tenant ID**: Enter the Directory (tenant) ID from Step 1\n - **Client ID**: Enter the Application (client) ID from Step 1\n - **Client Secret**: Enter the client secret value from Step 1\n \n **For Azure Managed Identity:**\n - **Managed Identity for Azure**: Set to **Enabled**\n\n4. **Configure Stream Properties**:\n - **Endpoint**: Choose your configuration method:\n - **For manual stream configuration**: Enter the DCE Logs Ingestion URI (format: `https://..ingest.monitor.azure.com`)\n - **For auto stream detection**: Enter the DCE Resource ID (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`)\n - **Streams**: Select **Auto** for automatic stream detection, or configure specific streams if needed\n\n5. **Verify Data Ingestion in Microsoft Sentinel data lake**:\n - Return to your **Log Analytics Workspace**\n - Run sample queries on the ASIM tables to confirm data is being received:\n ```kql\n ASimNetworkSessionLogs\n | where TimeGenerated > ago(1h)\n | take 10\n ```\n - Check the **Microsoft Sentinel Overview** dashboard for new data sources and event counts.""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": false}}], ""customs"": [{""name"": ""App Registration or Azure Managed Identity"", ""description"": ""VirtualMetric 
DataStream requires an Entra ID identity to authenticate and send logs to Microsoft Sentinel data lake. You can choose between creating an App Registration with Client ID and Client Secret, or using Azure Managed Identity for enhanced security without credential management.""}, {""name"": ""Resource Group Role Assignment"", ""description"": ""The chosen identity (App Registration or Managed Identity) must be assigned to the resource group containing the Data Collection Endpoint with the following roles: Monitoring Metrics Publisher (for log ingestion) and Monitoring Reader (for reading stream configuration).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream/Data%20Connectors/VirtualMetric-SentinelDataLake/Template_SentinelDataLake.json","true" +"","VirusTotal","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirusTotal","azuresentinel","azure-sentinel-solution-virustotal","2022-07-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"CommonSecurityLog","Votiro","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Votiro","votirocybersecltd1670174946024","votiro_data_connector","","","","Votiro","Partner","https://support.votiro.com/","","domains","Votiro","Votiro","[Deprecated] Votiro Sanitization Engine Logs","The Votiro data connector allows you to easily connect your Votiro Event logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. Using Votiro on Microsoft Sentinel will provide you more insights into the sanitization results of files.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set Votiro Endpoints to send Syslog messages in CEF format to the Forwarder machine. Make sure to send the logs to port 514 TCP on the Forwarder machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Votiro/Data%20Connectors/VotiroEvents.json","true" +"Syslog","Watchguard Firebox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchguard%20Firebox","watchguard-technologies","watchguard_firebox_mss","2022-05-06","","","WatchGuard","Partner","https://www.watchguard.com/wgrd-support/contact-support","","domains","WatchguardFirebox","WatchGuard Technologies","[Deprecated] WatchGuard Firebox","WatchGuard Firebox (https://www.watchguard.com/wgrd-products/firewall-appliances and https://www.watchguard.com/wgrd-products/cloud-and-virtual-firewalls) is security products/firewall-appliances. Watchguard Firebox will send syslog to Watchguard Firebox collector agent.The agent then sends the message to the workspace.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias WatchGuardFirebox and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchguard%20Firebox/Parsers/WatchGuardFirebox.txt) on the second line of the query, enter the hostname(s) of your WatchGuard Firebox device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchguard%20Firebox/Data%20Connectors/Connector_syslog_WatchGuardFirebox.json","true" +"","Watchlists Utilities","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchlists%20Utilities","azuresentinel","azure-sentinel-solution-watchlistsutilities","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Web Session Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Web%20Session%20Essentials","azuresentinel","azure-sentinel-solution-websession-domain","2023-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Web Shells Threat Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Web%20Shells%20Threat%20Protection","azuresentinel","azure-sentinel-solution-webshellsthreatprotection","2022-05-22","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Windows Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Firewall","azuresentinel","azure-sentinel-solution-windowsfirewall","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Windows Forwarded 
Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Forwarded%20Events","azuresentinel","azure-sentinel-solution-windowsforwardedevents","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Windows Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Security%20Events","azuresentinel","azure-sentinel-solution-securityevents","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Windows Server DNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Server%20DNS","azuresentinel","azure-sentinel-solution-dns","2022-05-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"CommonSecurityLog","WireX Network Forensics Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform","wirexsystems1584682625009","wirex_network_forensics_platform_mss","2022-05-06","","","WireX Systems","Partner","https://wirexsystems.com/contact-us/","","domains","WireX_Systems_NFP","WireX_Systems","[Deprecated] WireX Network Forensics Platform via Legacy Agent","The WireX Systems data connector allows security professional to integrate with Microsoft Sentinel to allow you to further enrich your forensics investigations; to not only encompass the contextual content offered by WireX but to analyze data from other sources, and to create custom dashboards to give the most complete picture during a forensic investigation and to create custom workflows.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Contact WireX support (https://wirexsystems.com/contact-us/) in order to configure your NFP solution to send Syslog messages in CEF format to the proxy machine. Make sure that the central manager can send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform/Data%20Connectors/WireXsystemsNFP%281b%29.json","true" +"CommonSecurityLog","WireX Network Forensics Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform","wirexsystems1584682625009","wirex_network_forensics_platform_mss","2022-05-06","","","WireX Systems","Partner","https://wirexsystems.com/contact-us/","","domains","WireX_Systems_NFPAma","WireX_Systems","[Deprecated] WireX Network Forensics Platform via AMA","The WireX Systems data connector allows security professional to integrate with Microsoft Sentinel to allow you to further enrich your forensics investigations; to not only encompass the contextual content offered by WireX but to analyze data from other sources, and to create custom dashboards to give the most complete picture during a forensic investigation and to create custom workflows.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Contact WireX support (https://wirexsystems.com/contact-us/) in order to configure your NFP solution to send Syslog messages in CEF format to the proxy machine. Make sure that they central manager can send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform/Data%20Connectors/template_WireXsystemsNFPAMA.json","true" +"CommonSecurityLog","WithSecureElementsViaConnector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaConnector","withsecurecorporation","sentinel-solution-withsecure-via-connector","2022-11-03","2022-11-03","","WithSecure","Partner","https://www.withsecure.com/en/support","","domains","WithSecureElementsViaConnector","WithSecure","[Deprecated] WithSecure Elements via Connector","WithSecure Elements is a unified cloud-based cyber security platform.
By connecting WithSecure Elements via Connector to Microsoft Sentinel, security events can be received in Common Event Format (CEF) over syslog.
It requires deploying ""Elements Connector"" either on-prem or in cloud.
The Common Event Format (CEF) provides natively search & correlation, alerting and threat intelligence enrichment for each data log.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your WithSecurity solution and Sentinel. The machine can be on-prem environment, Microsoft Azure or other cloud based.\n> Linux needs to have `syslog-ng` and `python`/`python3` installed.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""For python3 use command below:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python3 cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward data from WithSecure Elements Connector to Syslog agent"", ""description"": ""This describes how to install and configure Elements Connector step by step."", ""innerSteps"": [{""title"": ""2.1 Order Connector subscription"", ""description"": ""If Connector subscription has not been ordered yet go to EPP in Elements Portal. Then navigate to Downloads and in Elements Connector section click 'Create subscription key' button. You can check Your subscription key in Subscriptions.""}, {""title"": ""2.2 Download Connector"", ""description"": ""Go to Downloads and in WithSecure Elements Connector section select correct installer.""}, {""title"": ""2.3 Create management API key"", ""description"": ""When in EPP open account settings in top right corner. Then select Get management API key. 
If key has been created earlier it can be read there as well.""}, {""title"": ""2.4 Install Connector"", ""description"": ""To install Elements Connector follow [Elements Connector Docs](https://www.withsecure.com/userguides/product.html#business/connector/latest/en/).""}, {""title"": ""2.5 Configure event forwarding"", ""description"": ""If api access has not been configured during installation follow [Configuring API access for Elements Connector](https://www.withsecure.com/userguides/product.html#business/connector/latest/en/task_F657F4D0F2144CD5913EE510E155E234-latest-en).\nThen go to EPP, then Profiles, then use For Connector from where you can see the connector profiles. Create a new profile (or edit an existing not read-only profile). In Event forwarding enable it. SIEM system address: **127.0.0.1:514**. Set format to **Common Event Format**. Protocol is **TCP**. Save profile and assign it to Elements Connector in Devices tab.""}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""For python3 use command below:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python3 cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaConnector/Data%20Connectors/WithSecureElementsViaConnector.json","true" +"WsSecurityEvents_CL","WithSecureElementsViaFunction","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaFunction","withsecurecorporation","sentinel-solution-withsecure-via-function","2024-02-22","2025-04-25","","WithSecure","Partner","https://www.withsecure.com/en/support","","domains","WithSecureElementsViaFunction","WithSecure","WithSecure Elements API (Azure Function)","WithSecure Elements is the unified cloud-based cyber security platform designed to reduce risk, complexity, and inefficiency.

Elevate your security from your endpoints to your cloud applications. Arm yourself against every type of cyber threat, from targeted attacks to zero-day ransomware.

WithSecure Elements combines powerful predictive, preventive, and responsive security capabilities - all managed and monitored through a single security center. Our modular structure and flexible pricing models give you the freedom to evolve. With our expertise and insight, you'll always be empowered - and you'll never be alone.

With Microsoft Sentinel integration, you can correlate [security events](https://connect.withsecure.com/api-reference/security-events#overview) data from the WithSecure Elements solution with data from other sources, enabling a rich overview of your entire environment and faster reaction to threats.

With this solution Azure Function is deployed to your tenant, polling periodically for the WithSecure Elements security events.

For more information visit our website at: [https://www.withsecure.com](https://www.withsecure.com).","[{""title"": ""1. Create WithSecure Elements API credentials"", ""description"": ""Follow the [user guide](https://connect.withsecure.com/getting-started/elements#getting-client-credentials) to create Elements API credentials. Save credentials in a safe place.""}, {""title"": ""2. Create Microsoft Entra application"", ""description"": ""Create new Microsoft Entra application and credentials. Follow [the instructions](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-microsoft-entra-application) and store values of **Directory (tenant) ID**, **Object ID**, **Application (client) ID** and **Client Secret** (from client credentials field). Remember to store Client Secret in a safe place.""}, {""title"": ""3. Deploy Function App"", ""description"": "">**NOTE:** This connector uses Azure Functions to pull logs from WithSecure Elements. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store Microsoft Entra client credentials and WithSecure Elements API client credentials in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**IMPORTANT:** Before deploying the WithSecure Elements connector, have the Workspace Name (can be copied from the following), data from Microsoft Entra (Directory (tenant) ID, Object ID, Application (client) ID and Client Secret), as well as the WithSecure Elements client credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""workspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy all the resources related to the connector"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-WithSecureElementsViaFunction-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Entra Client ID**, **Entra Client Secret**, **Entra Tenant ID**, **Elements API Client ID**, **Elements API Client Secret**.\n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. You can also fill in optional fields: **Elements API url**, **Engine**, **Engine Group**. Use default value of **Elements API url** unless you have some special case. **Engine** and **Engine Group** map to [security events request parameters](https://connect.withsecure.com/api-reference/elements#post-/security-events/v1/security-events), fill in those parameters if you are interested only in events from specific engine or engine group, in case you want to receive all security events leave the fields with default values.\n5. 
Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n6. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""WithSecure Elements API client credentials"", ""description"": ""Client credentials are required. [See the documentation to learn more.](https://connect.withsecure.com/getting-started/elements#getting-client-credentials)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaFunction/Data%20Connectors/WithSecureElementsViaFunction.json","true" +"WizAuditLogsV2_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the erquired credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" +"WizAuditLogs_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the erquired credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" +"WizIssuesV2_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the erquired credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" +"WizIssues_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the erquired credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" +"WizVulnerabilitiesV2_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the erquired credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" +"WizVulnerabilities_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the erquired credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" +"ASimAuditEventLogs","Workday","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workday","azuresentinel","azure-sentinel-solution-workday","2024-02-15","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","WorkdayCCPDefinition","Microsoft","Workday User Activity","The [Workday](https://www.workday.com/) User Activity data connector provides the capability to ingest User Activity Logs from [Workday API](https://community.workday.com/sites/default/files/file-hosting/restapi/index.html#privacy/v1/get-/activityLogging) into Microsoft Sentinel.","[{""description"": ""1) In Workday, access the \""Edit Tenant Setup - Security\"" task, verify \""OAuth 2.0 Settings\"" section, make sure that the \""OAuth 2.0 Clients Enabled\"" check box is ticked. \n 2) In Workday, access the \""Edit Tenant Setup - System\"" task, verify \""User Activity Logging\"" section, make sure that the \""Enable User Activity Logging\"" check box is ticked. 
\n 3) In Workday, access the \""Register API Client\"" task.\n 4) Define the Client Name, select the \""Client Grant Type\"": \""Authorization Code Grant\"" and then select \""Access Token Type\"": \""Bearer\""\n 5) Enter the \""Redirection URI\"": https://portal.azure.com/TokenAuthorize/ExtensionName/Microsoft_Azure_Security_Insights \n 6) In section \""Scope (Functional Areas)\"", select \""System\"" and click OK at the bottom \n 7) Copy the Client ID and Client Secret before navigating away from the page, and store it securely. \n 8) In Sentinel, in the connector page - provide required Token, Authorization and User Activity Logs Endpoints, along with Client ID and Client Secret from previous step. Then click \""Connect\"". \n 9) A Workday pop up will appear to complete the OAuth2 authentication and authorization of the API client. Here you need to provide credentials for Workday account with \""System Auditing\"" permissions in Workday (can be either Workday account or Integration System User). 
\n 10) Once that's complete, the message will be displayed to authorize your API client \n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Token Endpoint"", ""placeholder"": ""https://wd2-impl-services1.workday.com/ccx/oauth2/{tenantName}/token"", ""type"": ""text"", ""name"": ""tokenEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Authorization Endpoint"", ""placeholder"": ""https://impl.workday.com/{tenantName}/authorize"", ""type"": ""text"", ""name"": ""authorizationEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""User Activity Logs Endpoint, it ends with /activityLogging "", ""placeholder"": ""https://wd2-impl-services1.workday.com/ccx/api/privacy/v1/{tenantName}/activityLogging"", ""type"": ""text"", ""name"": ""apiEndpoint""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Workday to start collecting user activity logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Workday User Activity API access"", ""description"": ""Access to the Workday user activity API through Oauth are required. 
The API Client needs to have the scope: System and it needs to be authorized by an account with System Auditing permissions.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workday/Data%20Connectors/Workday_ccp/Workday_DataConnectorDefinition.json","true" +"Workplace_Facebook_CL","Workplace from Facebook","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workplace%20from%20Facebook","azuresentinel","azure-sentinel-solution-workplacefromfacebook","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","WorkplaceFacebook","Facebook","Workplace from Facebook","The [Workplace](https://www.workplace.com/) data connector provides the capability to ingest common Workplace events into Microsoft Sentinel through Webhooks. Webhooks enable custom integration apps to subscribe to events in Workplace and receive updates in real time. When a change occurs in Workplace, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.facebook.com/docs/workplace/reference/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias WorkplaceFacebook and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workplace%20from%20Facebook/Parsers/Workplace_Facebook.txt) on the second line of the query, enter the hostname(s) of your Workplace Facebook device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Workplace**\n\n Follow the instructions to configure Webhooks.\n\n1. Log in to the Workplace with Admin user credentials.\n2. In the Admin panel, click **Integrations**.\n3. In the **All integrations** view, click **Create custom integration**\n4. Enter the name and description and click **Create**.\n5. In the **Integration details** panel show **App secret** and copy.\n6. In the **Integration permissions** pannel set all read permissions. Refer to [permission page](https://developers.facebook.com/docs/workplace/reference/permissions) for details.\n7. Now proceed to STEP 2 to follow the steps (listed in Option 1 or 2) to Deploy the Azure Function.\n8. Enter the requested parameters and also enter a Token of choice. Copy this Token / Note it for the upcoming step.\n9. After the deployment of Azure Functions completes successfully, open Function App page, select your app, go to **Functions**, click **Get Function URL** and copy this / Note it for the upcoming step.\n10. Go back to Workplace from Facebook. 
In the **Configure webhooks** panel on each Tab set **Callback URL** as the same value that you copied in point 9 above and Verify token as the same\n value you copied in point 8 above which was obtained during STEP 2 of Azure Functions deployment.\n11. Click Save.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Workplace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Workplace data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-WorkplaceFacebook-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-WorkplaceFacebook-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **WorkplaceVerifyToken** (can be any expression, copy and save it for STEP 1), **WorkplaceAppSecret** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Sophos Endpoint Protection data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-WorkplaceFacebook-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkplaceAppSecret\n\t\tWorkplaceVerifyToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Webhooks Credentials/permissions"", ""description"": ""WorkplaceAppSecret, WorkplaceVerifyToken, Callback URL are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://developers.facebook.com/docs/workplace/reference/webhooks), [configuring permissions](https://developers.facebook.com/docs/workplace/reference/permissions). 
""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workplace%20from%20Facebook/Data%20Connectors/WorkplaceFacebook/WorkplaceFacebook_Webhooks_FunctionApp.json","true" +"ZeroFoxAlertPoller_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxAlertsDefinition","ZeroFox Enterprise","ZeroFox Enterprise - Alerts (Polling CCF)","Collects alerts from ZeroFox API.","[{""description"": ""Connect ZeroFox to Microsoft Sentinel"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Provide your ZeroFox PAT"", ""placeholder"": ""Zerofox PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect ZeroFox to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/solutions"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""ZeroFox Personal Access Token (PAT)"", ""description"": ""A ZeroFox PAT is required. 
You can get it in Data Connectors > [API Data Feeds](https://cloud.zerofox.com/data_connectors/api).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/Alerts/ZeroFoxAlerts_ConnectorDefinition.json","true" +"ZeroFox_CTI_C2_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_advanced_dark_web_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_botnet_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull 
logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_breaches_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_compromised_credentials_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_credit_cards_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API 
to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_dark_web_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_discord_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_disruption_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to 
pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_email_addresses_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_exploits_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_irc_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull 
logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_malware_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_national_ids_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_phishing_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to 
pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_phone_numbers_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_ransomware_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_telegram_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to 
pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_threat_actors_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_vulnerabilities_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZNSegmentAuditNativePoller_CL","ZeroNetworks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroNetworks","zeronetworksltd1629013803351","azure-sentinel-solution-znsegmentaudit","2022-06-06","2025-09-17","","Zero Networks","Partner","https://zeronetworks.com","","domains","ZeroNetworksSegmentAuditNativePoller","Zero Networks","Zero Networks Segment Audit","The [Zero Networks Segment](https://zeronetworks.com/) Audit data connector provides the capability to ingest Zero Networks Audit events into Microsoft Sentinel through the REST API. 
This data connector uses Microsoft Sentinel native polling capability.","[{""title"": ""Connect Zero Networks to Microsoft Sentinel"", ""description"": ""Enable Zero Networks audit Logs."", ""instructions"": [{""parameters"": {""enable"": ""true""}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Zero Networks API Token"", ""description"": ""**ZeroNetworksAPIToken** is required for REST API. 
See the API Guide and follow the instructions for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroNetworks/Data%20Connectors/SegmentNativePollerConnector/azuredeploy_ZeroNetworks_Segment_native_poller_connector.json","true" +"","ZeroTrust(TIC3.0)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroTrust%28TIC3.0%29","azuresentinel","azure-sentinel-solution-zerotrust","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"ZimperiumMitigationLog_CL","Zimperium Mobile Threat Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense","zimperiuminc","zimperium_mobile_threat_defense_mss","2022-05-02","","","Zimperium","Partner","https://www.zimperium.com/support/","","domains","ZimperiumMtdAlerts","Zimperium","Zimperium Mobile Threat Defense","Zimperium Mobile Threat Defense connector gives you the ability to connect the Zimperium threat log with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's mobile threat landscape and enhances your security operation capabilities.","[{""title"": ""Configure and connect Zimperium MTD"", ""description"": ""1. In zConsole, click **Manage** on the navigation bar.\n2. Click the **Integrations** tab.\n3. Click the **Threat Reporting** button and then the **Add Integrations** button.\n4. Create the Integration:\n - From the available integrations, select Microsoft Sentinel.\n - Enter your workspace id and primary key from the fields below, click **Next**.\n - Fill in a name for your Microsoft Sentinel integration.\n - Select a Filter Level for the threat data you wish to push to Microsoft Sentinel.\n - Click **Finish**\n5. 
For additional instructions, please refer to the [Zimperium customer support portal](https://support.zimperium.com)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense/Data%20Connectors/Zimperium%20MTD%20Alerts.json","true" +"ZimperiumThreatLog_CL","Zimperium Mobile Threat Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense","zimperiuminc","zimperium_mobile_threat_defense_mss","2022-05-02","","","Zimperium","Partner","https://www.zimperium.com/support/","","domains","ZimperiumMtdAlerts","Zimperium","Zimperium Mobile Threat Defense","Zimperium Mobile Threat Defense connector gives you the ability to connect the Zimperium threat log with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's mobile threat landscape and enhances your security operation capabilities.","[{""title"": ""Configure and connect Zimperium MTD"", ""description"": ""1. In zConsole, click **Manage** on the navigation bar.\n2. Click the **Integrations** tab.\n3. Click the **Threat Reporting** button and then the **Add Integrations** button.\n4. Create the Integration:\n - From the available integrations, select Microsoft Sentinel.\n - Enter your workspace id and primary key from the fields below, click **Next**.\n - Fill in a name for your Microsoft Sentinel integration.\n - Select a Filter Level for the threat data you wish to push to Microsoft Sentinel.\n - Click **Finish**\n5. For additional instructions, please refer to the [Zimperium customer support portal](https://support.zimperium.com)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense/Data%20Connectors/Zimperium%20MTD%20Alerts.json","true" +"","Zinc Open Source","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zinc%20Open%20Source","azuresentinel","azure-sentinel-solution-zincopensource","2022-10-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"Zoom_CL","ZoomReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZoomReports","azuresentinel","azure-sentinel-solution-zoomreports","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","Zoom","Zoom","Zoom Reports","The [Zoom](https://zoom.us/) Reports data connector provides the capability to ingest [Zoom Reports](https://developers.zoom.us/docs/api/rest/reference/zoom-api/methods/#tag/Reports) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developers.zoom.us/docs/api/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Zoom API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Zoom and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZoomReports/Parsers/Zoom.yaml). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Zoom API**\n\n [Follow the instructions](https://developers.zoom.us/docs/internal-apps/create/) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Zoom Reports data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Zoom Audit data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ZoomAPI-azuredeployV2) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ZoomAPI-azuredeployV2-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **AccountID**, **ClientID**, **ClientSecret**, **WorkspaceID**, **WorkspaceKey**, **Function Name** and click Review + create. \n4. Finally click **Create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Zoom Reports data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ZoomAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. 
**Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ZoomXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAccountID\n\t\tClientID\n\t\tClientSecret\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**AccountID**, **ClientID** and **ClientSecret** are required for Zoom API. [See the documentation to learn more about Zoom API](https://developers.zoom.us/docs/internal-apps/create/). 
[Follow the instructions for Zoom API configurations](https://aka.ms/sentinel-zoomreports-readme).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZoomReports/Data%20Connectors/ZoomReports_API_FunctionApp.json","true" +"","Zscaler Internet Access","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Internet%20Access","zscaler1579058425289","zscaler_internet_access_mss","2022-05-25","","","Zscaler","Partner","https://help.zscaler.com/submit-ticket-links","","domains","","","","","","","","false" +"ZPA_CL","Zscaler Private Access (ZPA)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Private%20Access%20%28ZPA%29","azuresentinel","azure-sentinel-solution-zscalerprivateaccess","2022-01-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ZscalerPrivateAccess","Zscaler","[Deprecated] Zscaler Private Access","The [Zscaler Private Access (ZPA)](https://help.zscaler.com/zpa/what-zscaler-private-access) data connector provides the capability to ingest [Zscaler Private Access events](https://help.zscaler.com/zpa/log-streaming-service) into Microsoft Sentinel. Refer to [Zscaler Private Access documentation](https://help.zscaler.com/zpa) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-ZscalerPrivateAccess-parser) to create the Kusto Functions alias, **ZPAEvent**"", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Zscaler Private Access version: 21.67.1"", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the Zscaler Private Access logs are forwarded.\n\n> Logs from Zscaler Private Access Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Zscaler Private Access logs into Microsoft Sentinel. 
Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\nZscaler Private Access logs are delivered via Log Streaming Service (LSS). Refer to [LSS documentation](https://help.zscaler.com/zpa/about-log-streaming-service) for detailed information\n1. Configure [Log Receivers](https://help.zscaler.com/zpa/configuring-log-receiver). While configuring a Log Receiver, choose **JSON** as **Log Template**.\n2. Download config file [zpa.conf](https://aka.ms/sentinel-ZscalerPrivateAccess-conf) \n\t\twget -v https://aka.ms/sentinel-zscalerprivateaccess-conf -O zpa.conf\n3. Login to the server where you have installed Azure Log Analytics agent.\n4. Copy zpa.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder.\n5. Edit zpa.conf as follows:\n\n\t a. specify port which you have set your Zscaler Log Receivers to forward logs to (line 4)\n\n\t b. zpa.conf uses the port **22033** by default. Ensure this port is not being used by any other source on your server\n\n\t c. If you would like to change the default port for **zpa.conf** make sure that it should not get conflict with default AMA agent ports I.e.(For example CEF uses TCP port **25226** or **25224**) \n\n\t d. replace **workspace_id** with real value of your Workspace ID (lines 14,15,16,19)\n5. 
Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Private%20Access%20%28ZPA%29/Data%20Connectors/Connector_LogAnalytics_agent_Zscaler_ZPA.json","true" +"NCProtectUAL_CL","archTIS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/archTIS","nucleuscyber","nc-protect-azure-sentinel-data-connector","2021-10-20","","","archTIS","Partner","https://www.archtis.com/nc-protect-support/","","domains","NucleusCyberNCProtect","archTIS","NC Protect","[NC Protect Data Connector (archtis.com)](https://info.archtis.com/get-started-with-nc-protect-sentinel-data-connector) provides the capability to ingest user activity logs and events into Microsoft Sentinel. The connector provides visibility into NC Protect user activity logs and events in Microsoft Sentinel to improve monitoring and investigation capabilities","[{""title"": """", ""description"": ""1. Install NC Protect into your Azure Tenancy\n2. 
Log into the NC Protect Administration site\n3. From the left hand navigation menu, select General -> User Activity Monitoring\n4. Tick the checkbox to Enable SIEM and click the Configure button\n5. Select Microsoft Sentinel as the Application and complete the configuration using the information below\n6. Click Save to activate the connection\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""NC Protect"", ""description"": ""You must have a running instance of NC Protect for O365. 
Please [contact us](https://www.archtis.com/data-discovery-classification-protection-software-secure-collaboration/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/archTIS/Data%20Connectors/NucleusCyberNCProtect.json","true" +"CommonSecurityLog","iboss","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss","iboss","iboss-sentinel-connector","2022-02-15","","","iboss","Partner","https://www.iboss.com/contact-us/","","domains","iboss","iboss","[Deprecated] iboss via Legacy Agent","The [iboss](https://www.iboss.com) data connector enables you to seamlessly connect your Threat Console to Microsoft Sentinel and enrich your instance with iboss URL event logs. Our logs are forwarded in Common Event Format (CEF) over Syslog and the configuration required can be completed on the iboss platform without the use of a proxy. Take advantage of our connector to garner critical data points and gain insight into security threats.","[{""title"": ""1. Configure a dedicated proxy Linux machine"", ""description"": ""If using the iboss gov environment or there is a preference to forward the logs to a dedicated proxy Linux machine, proceed with this step. 
In all other cases, please advance to step two."", ""innerSteps"": [{""title"": ""1.1 Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace""}, {""title"": ""1.2 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the dedicated proxy Linux machine between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.3 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n> 2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs"", ""description"": ""Set your Threat Console to send Syslog messages in CEF format to your Azure workspace. Make note of your Workspace ID and Primary Key within your Log Analytics Workspace (Select the workspace from the Log Analytics workspaces menu in the Azure portal. Then select Agents management in the Settings section). \n\n>1. 
Navigate to Reporting & Analytics inside your iboss Console\n\n>2. Select Log Forwarding -> Forward From Reporter\n\n>3. Select Actions -> Add Service\n\n>4. Toggle to Microsoft Sentinel as a Service Type and input your Workspace ID/Primary Key along with other criteria. If a dedicated proxy Linux machine has been configured, toggle to Syslog as a Service Type and configure the settings to point to your dedicated proxy Linux machine\n\n>5. Wait one to two minutes for the setup to complete\n\n>6. Select your Microsoft Sentinel Service and verify the Microsoft Sentinel Setup Status is Successful. If a dedicated proxy Linux machine has been configured, you may proceed with validating your connection""}, {""title"": ""3. Validate connection"", ""description"": ""Open Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace""}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy (Only applicable if a dedicated proxy Linux machine has been configured).\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss/Data%20Connectors/iboss_cef.json","true" +"CommonSecurityLog","iboss","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss","iboss","iboss-sentinel-connector","2022-02-15","","","iboss","Partner","https://www.iboss.com/contact-us/","","domains","ibossAma","iboss","iboss via AMA","The [iboss](https://www.iboss.com) data connector enables you to seamlessly connect your Threat Console to Microsoft Sentinel and enrich your instance with iboss URL event logs. Our logs are forwarded in Common Event Format (CEF) over Syslog and the configuration required can be completed on the iboss platform without the use of a proxy. Take advantage of our connector to garner critical data points and gain insight into security threats.","[{""title"": ""Configure AMA Data Connector"", ""description"": ""Steps to configure the iboss AMA Data Connector"", ""instructions"": [{""parameters"": {""title"": ""Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Gather Required Configuration Details in Azure Arc"", ""description"": ""1. Navigate to Azure Arc ---> Azure Arc Resources ---> Machines.\n\n2. Add a machine ---> Add a single server ---> Generate script.\n\n3. Select the resource group, this should be the same group as the Log Analytics Workspace for your Microsoft Sentinel instance you will be using\n\n4. Select a region and ensure it is in the same region as your Log Analytics Workspace\n\n5. Select Linux as Operating System\n\n6. Click Next\n\n7. Download the script and use this information for the next step when configuring your Microsoft Sentinel AMA integration iboss side.\n\n8. 
Navigate to the Log Analytics Workspace of your Microsoft Sentinel instance and find it's resource group, workspace name, and workspace id""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs"", ""description"": ""Set your Threat Console to send Syslog messages in CEF format to your Azure workspace. (Ensure you have the information gathered from the previous section)\n\n>1. Navigate to the Integrations Marketplace inside your iboss Console\n\n>2. Select Microsoft Sentinel AMA Log Forwarding\n\n>3. Select Add Integration\n\n4. Use the information from the script and your log analytics workspace to configure the integration.\n\n5. Add the integration\n\n>6. An email with be sent to your iboss alerts email to authenticate. Please do so within five minutes\n\n7. After authenticating, wait 15 to 20 minutes and ensure the Microsoft Sentinel Status of your integration is successful.""}, {""title"": ""Step C. Validate connection"", ""description"": ""1. Follow the instructions to validate your connectivity:\n\n2. Open Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n3. It may take about 20 minutes until the connection streams data to your workspace.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss/Data%20Connectors/template_ibossAMA.json","true" +"CommonSecurityLog","vArmour Application Controller","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller","varmournetworks","varmour_sentinel","2022-06-01","","","vArmour Networks","Partner","https://www.varmour.com/contact-us/","","domains","vArmourAC","vArmour","[Deprecated] vArmour Application Controller via Legacy Agent","vArmour reduces operational risk and increases cyber resiliency by visualizing and controlling application relationships across the enterprise. This vArmour connector enables streaming of Application Controller Violation Alerts into Microsoft Sentinel, so you can take advantage of search & correlation, alerting, & threat intelligence enrichment for each log.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. 
The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure the vArmour Application Controller to forward Common Event Format (CEF) logs to the Syslog agent"", ""description"": ""Send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address."", ""innerSteps"": [{""title"": ""2.1 Download the vArmour Application Controller user guide"", ""description"": ""Download the user guide from https://support.varmour.com/hc/en-us/articles/360057444831-vArmour-Application-Controller-6-0-User-Guide.""}, {""title"": ""2.2 Configure the Application Controller to Send Policy Violations"", ""description"": ""In the user guide - refer to \""Configuring Syslog for Monitoring and Violations\"" and follow steps 1 to 3.""}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller/Data%20Connectors/Connector_vArmour_AppController_CEF.json","true" +"CommonSecurityLog","vArmour Application Controller","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller","varmournetworks","varmour_sentinel","2022-06-01","","","vArmour Networks","Partner","https://www.varmour.com/contact-us/","","domains","vArmourACAma","vArmour","[Deprecated] vArmour Application Controller via AMA","vArmour reduces operational risk and increases cyber resiliency by visualizing and controlling application relationships across the enterprise. This vArmour connector enables streaming of Application Controller Violation Alerts into Microsoft Sentinel, so you can take advantage of search & correlation, alerting, & threat intelligence enrichment for each log.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Configure the vArmour Application Controller to forward Common Event Format (CEF) logs to the Syslog agent"", ""description"": ""Send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address."", ""innerSteps"": [{""title"": ""1 Download the vArmour Application Controller user guide"", ""description"": ""Download the user guide from https://support.varmour.com/hc/en-us/articles/360057444831-vArmour-Application-Controller-6-0-User-Guide.""}, {""title"": ""2 Configure the Application Controller to Send Policy Violations"", ""description"": ""In the user guide - refer to \""Configuring Syslog for Monitoring and Violations\"" and follow steps 1 to 3.""}]}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller/Data%20Connectors/template_vArmour_AppControllerAMA.json","true" diff --git a/Workbooks/WorkbooksMetadata.json b/Workbooks/WorkbooksMetadata.json index 7fa6c4f4a0f..0dc89bc0851 100644 --- a/Workbooks/WorkbooksMetadata.json +++ b/Workbooks/WorkbooksMetadata.json @@ -9747,7 +9747,7 @@ "GDPRComplianceAndDataSecurityWhite.png", "GDPRComplianceAndDataSecurityBlack.png" ], - "version": "1.0.0", + "version": "1.0.1", "title": "GDPR Compliance And Data Security (Preview)", "templateRelativePath": "GDPRComplianceAndDataSecurity.json", "subtitle": "",